Using AVCapture to write video and audio separately, but simultaneously
I would like to capture video and audio separately, but simultaneously, so that in the end I have a .mp4 file and a .wav (or similar audio format) in my app's Documents folder. So far I have the code below, but it never even calls - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection.
Any ideas what is wrong here?
#import "VideoCaptureViewController.h"
#import <AssetsLibrary/AssetsLibrary.h>
#import "SMFileManager.h"
@interface VideoCaptureViewController () {
AVCaptureSession *session_;
AVCaptureDevice *captureDevice_;
AVCaptureDeviceInput *deviceInput_;
AVCaptureMovieFileOutput *captureMovieFileOutput_;
BOOL recording_;
AVCaptureAudioDataOutput *audioDataOutput_;
NSString *filePathCapturedVideo_;
}
- (void) toggleRecording;
- (void) endRecording;
@end
@implementation VideoCaptureViewController
@synthesize delegate;
@synthesize previewLayer;
- (void)viewDidLoad
{
    [super viewDidLoad];

    session_ = [[AVCaptureSession alloc] init];

    //----- ADD VIDEO INPUT -----
    NSArray *videoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    captureDevice_ = nil;
    for (AVCaptureDevice *device in videoDevices) {
        if (device.position == AVCaptureDevicePositionFront) {
            captureDevice_ = device;
            break;
        }
    }
    if (captureDevice_) {
        NSError *error;
        deviceInput_ = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice_ error:&error];
        if (!error) {
            if ([session_ canAddInput:deviceInput_])
                [session_ addInput:deviceInput_];
            else
                NSLog(@"Couldn't add video input");
        } else {
            NSLog(@"Couldn't create video input");
        }
    } else {
        NSLog(@"Couldn't create video capture device");
    }

    //----- ADD AUDIO INPUT -----
    NSLog(@"Adding audio input");
    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    if (audioInput && [session_ canAddInput:audioInput]) {
        [session_ addInput:audioInput];
    }

    //----- ADD OUTPUTS -----
    //Audio data output. NOTE: as the answer below explains, this output
    //will not deliver sample buffers while an AVCaptureMovieFileOutput
    //is attached to the same session.
    audioDataOutput_ = [[AVCaptureAudioDataOutput alloc] init];
    [session_ addOutput:audioDataOutput_];
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [audioDataOutput_ setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue); //The output retains the queue

    //----- ADD VIDEO PREVIEW LAYER -----
    NSLog(@"Adding video preview layer");
    [self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:session_] autorelease]];
    previewLayer.orientation = AVCaptureVideoOrientationPortrait; //<<SET ORIENTATION. You can deliberately set this wrong to flip the image and may actually need to set it wrong to get the right image
    [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    //----- ADD MOVIE FILE OUTPUT -----
    NSLog(@"Adding movie file output");
    captureMovieFileOutput_ = [[AVCaptureMovieFileOutput alloc] init];
    // Float64 TotalSeconds = 60; //Total seconds
    // int32_t preferredTimeScale = CAPTURE_FRAMES_PER_SECOND; //Frames per second
    // CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION
    // captureMovieFileOutput_.maxRecordedDuration = maxDuration;
    captureMovieFileOutput_.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME
    if ([session_ canAddOutput:captureMovieFileOutput_])
        [session_ addOutput:captureMovieFileOutput_];

    //SET THE CONNECTION PROPERTIES (output properties)
    [self CameraSetOutputProperties]; //(We call a method as it also has to be done after changing cameras)

    //----- SET THE IMAGE QUALITY / RESOLUTION -----
    //Options:
    //  AVCaptureSessionPresetHigh - Highest recording quality (varies per device)
    //  AVCaptureSessionPresetMedium - Suitable for WiFi sharing (actual values may change)
    //  AVCaptureSessionPresetLow - Suitable for 3G sharing (actual values may change)
    //  AVCaptureSessionPreset640x480 - 640x480 VGA (check it's supported before setting it)
    //  AVCaptureSessionPreset1280x720 - 1280x720 720p HD (check it's supported before setting it)
    //  AVCaptureSessionPresetPhoto - Full photo resolution (not supported for video output)
    NSLog(@"Setting image quality");
    [session_ setSessionPreset:AVCaptureSessionPresetHigh];
    if ([session_ canSetSessionPreset:AVCaptureSessionPreset1280x720]) { //Check size-based configs are supported before setting them
        NSLog(@"1280x720 confirmed!");
        [session_ setSessionPreset:AVCaptureSessionPreset1280x720];
    }

    //----- DISPLAY THE PREVIEW LAYER -----
    //Display it full screen under our view controller's existing controls
    NSLog(@"Display the preview layer");
    CGRect layerRect = [[[self view] layer] bounds];
    [previewLayer setBounds:layerRect];
    [previewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect), CGRectGetMidY(layerRect))];
    //Put the preview on a layer behind our UI controls (avoids us having to manually bring each control to the front)
    UIView *cameraView = [[[UIView alloc] init] autorelease];
    [[self view] addSubview:cameraView];
    [self.view sendSubviewToBack:cameraView];
    [[cameraView layer] addSublayer:previewLayer];

    //----- START THE CAPTURE SESSION RUNNING -----
    [session_ startRunning];
}
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    //Compare against the UIInterfaceOrientation constant, not UIDeviceOrientationPortrait
    return (interfaceOrientation == UIInterfaceOrientationPortrait);
}
//********** VIEW WILL APPEAR **********
//View about to be added to the window (called each time it appears)
//Occurs after the other view's viewWillDisappear
- (void)viewWillAppear:(BOOL)animated
{
    NSLog(@"%s", __func__);
    [super viewWillAppear:animated];
    recording_ = NO;
}

- (void)viewDidAppear:(BOOL)animated
{
    NSLog(@"%s", __func__);
    [super viewDidAppear:animated];
    UIButton *startStopButton = [UIButton buttonWithType:UIButtonTypeCustom];
    startStopButton.frame = CGRectMake(self.view.center.x, self.view.frame.size.height - 80, 100, 50);
    [startStopButton setTitle:@"record" forState:UIControlStateNormal];
    [startStopButton addTarget:self action:@selector(toggleRecording) forControlEvents:UIControlEventTouchUpInside];
    startStopButton.enabled = YES;
    startStopButton.backgroundColor = [UIColor grayColor];
    [self.view addSubview:startStopButton];
    // [NSTimer timerWithTimeInterval:5 target:self selector:@selector(toggleRecording) userInfo:nil repeats:NO];
    // [NSTimer scheduledTimerWithTimeInterval:5 target:self selector:@selector(endRecording) userInfo:nil repeats:NO];
}

- (void)endRecording
{
    [self dismissModalViewControllerAnimated:YES];
}
//********** CAMERA SET OUTPUT PROPERTIES **********
- (void)CameraSetOutputProperties
{
    //SET THE CONNECTION PROPERTIES (output properties)
    AVCaptureConnection *CaptureConnection = [captureMovieFileOutput_ connectionWithMediaType:AVMediaTypeVideo];

    //Set landscape (if required)
    if ([CaptureConnection isVideoOrientationSupported]) {
        // [CaptureConnection setVideoOrientation:[UIDevice currentDevice].orientation];
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
        [CaptureConnection setVideoOrientation:orientation];
    }

    //Set frame rate (if required)
    CMTimeShow(CaptureConnection.videoMinFrameDuration);
    CMTimeShow(CaptureConnection.videoMaxFrameDuration);
    if (CaptureConnection.supportsVideoMinFrameDuration)
        CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    if (CaptureConnection.supportsVideoMaxFrameDuration)
        CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND);
    CMTimeShow(CaptureConnection.videoMinFrameDuration);
    CMTimeShow(CaptureConnection.videoMaxFrameDuration);
}
//********** GET CAMERA IN SPECIFIED POSITION IF IT EXISTS **********
- (AVCaptureDevice *)CameraWithPosition:(AVCaptureDevicePosition)Position
{
    NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *Device in Devices) {
        if ([Device position] == Position) {
            return Device;
        }
    }
    return nil;
}
//********** CAMERA TOGGLE **********
- (IBAction)CameraToggleButtonPressed:(id)sender
{
    if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1) //Only do this if the device has multiple cameras
    {
        NSLog(@"Toggle camera");
        NSError *error;
        AVCaptureDeviceInput *NewVideoInput = nil; //Initialise to nil so the check below is safe for unknown positions
        AVCaptureDevicePosition position = [[deviceInput_ device] position];
        if (position == AVCaptureDevicePositionBack)
        {
            NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionFront] error:&error];
        }
        else if (position == AVCaptureDevicePositionFront)
        {
            NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionBack] error:&error];
        }

        if (NewVideoInput != nil)
        {
            [session_ beginConfiguration]; //We can now change the inputs and output configuration. Use commitConfiguration to end.
            [session_ removeInput:deviceInput_];
            if ([session_ canAddInput:NewVideoInput])
            {
                [session_ addInput:NewVideoInput];
                deviceInput_ = NewVideoInput;
            }
            else
            {
                [session_ addInput:deviceInput_];
            }
            //Set the connection properties again
            [self CameraSetOutputProperties];
            [session_ commitConfiguration];
            [NewVideoInput release]; //The session retains the input; deviceInput_ is a non-owning reference, as elsewhere in this class
        }
    }
}
- (void)toggleRecording
{
    NSLog(@"%s", __func__);
    if (!recording_) {
        NSLog(@"START RECORDING");
        recording_ = YES;
        NSString *documentsDirectory = [SMFileManager applicationDocumentsDirectory];
        [filePathCapturedVideo_ release];
        filePathCapturedVideo_ = [[documentsDirectory stringByAppendingPathComponent:@"captured-video.mp4"] copy]; //copy so the ivar owns its string
        NSLog(@"storing file at %@", filePathCapturedVideo_);
        NSURL *url = [NSURL fileURLWithPath:filePathCapturedVideo_]; //autoreleased; the alloc/init version leaked
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:filePathCapturedVideo_]) {
            NSError *error;
            if (![fileManager removeItemAtPath:filePathCapturedVideo_ error:&error]) {
                //Error - handle if required
            }
        }
        //Start recording
        [captureMovieFileOutput_ startRecordingToOutputFileURL:url recordingDelegate:self];
    } else {
        NSLog(@"STOP RECORDING");
        recording_ = NO;
        [captureMovieFileOutput_ stopRecording];
    }
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //This is the callback that never fires with the configuration above
    NSLog(@"%s", __func__);
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    NSLog(@"%@", outputFileURL);
    NSLog(@"%@", captureOutput);
    NSString *documentsDirectory = [SMFileManager applicationDocumentsDirectory];
    [filePathCapturedVideo_ release];
    filePathCapturedVideo_ = [[documentsDirectory stringByAppendingPathComponent:@"captured-video.mp4"] copy];
    [self.delegate videoCaptured:filePathCapturedVideo_];
}
@end
I have had similar problems and concluded that it is not possible to use both of these delegates at the same time: once an AVCaptureMovieFileOutput has been added to the session, the data outputs stop receiving sample buffers, which is why captureOutput:didOutputSampleBuffer:fromConnection: is never called. The best solution is to drop the movie file output, receive both audio and video through didOutputSampleBuffer (adding an AVCaptureVideoDataOutput for the frames), and save the two files separately with AVAssetWriter.
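Here is a minimal sketch of that approach, written in the question's pre-ARC style. The ivars (videoDataOutput_, videoWriter_, videoWriterInput_, audioWriter_, audioWriterInput_), the setupWriters helper, and the 1280x720 H.264 / mono AAC output settings are my own illustrative assumptions, not from the question. For a literal .wav file, an AVFileTypeWAVE writer with kAudioFormatLinearPCM output settings should be the equivalent route; only the .m4a variant is sketched here.

In viewDidLoad, replace the AVCaptureMovieFileOutput with an AVCaptureVideoDataOutput that shares the audio output's queue (the class must then also adopt AVCaptureVideoDataOutputSampleBufferDelegate; both data-output protocols declare the same callback):

//Before the dispatch_release(queue) call:
videoDataOutput_ = [[AVCaptureVideoDataOutput alloc] init];
[videoDataOutput_ setSampleBufferDelegate:self queue:queue];
if ([session_ canAddOutput:videoDataOutput_])
    [session_ addOutput:videoDataOutput_];

Then create one writer per file before recording starts:

- (BOOL)setupWriters //hypothetical helper, called before recording starts
{
    NSError *error = nil;
    NSString *docs = [SMFileManager applicationDocumentsDirectory];
    NSURL *videoURL = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"captured-video.mp4"]];
    NSURL *audioURL = [NSURL fileURLWithPath:[docs stringByAppendingPathComponent:@"captured-audio.m4a"]];

    //Video-only .mp4 (1280x720 H.264 - illustrative settings)
    videoWriter_ = [[AVAssetWriter alloc] initWithURL:videoURL fileType:AVFileTypeMPEG4 error:&error];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:1280], AVVideoWidthKey,
                                   [NSNumber numberWithInt:720], AVVideoHeightKey,
                                   nil];
    videoWriterInput_ = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    videoWriterInput_.expectsMediaDataInRealTime = YES; //drop data rather than stall the capture queue
    [videoWriter_ addInput:videoWriterInput_];

    //Audio-only .m4a (mono AAC at 44.1 kHz - illustrative settings)
    audioWriter_ = [[AVAssetWriter alloc] initWithURL:audioURL fileType:AVFileTypeAppleM4A error:&error];
    NSDictionary *audioSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                   [NSNumber numberWithFloat:44100.0f], AVSampleRateKey,
                                   [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                   [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                   nil];
    audioWriterInput_ = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    audioWriterInput_.expectsMediaDataInRealTime = YES;
    [audioWriter_ addInput:audioWriterInput_];

    return (error == nil);
}

The shared callback then routes each buffer to its writer, starting each writer's session at the timestamp of the first buffer it sees:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (!recording_) return;

    BOOL isVideo = (captureOutput == videoDataOutput_);
    AVAssetWriter *writer = isVideo ? videoWriter_ : audioWriter_;
    AVAssetWriterInput *input = isVideo ? videoWriterInput_ : audioWriterInput_;

    if (writer.status == AVAssetWriterStatusUnknown) {
        [writer startWriting];
        [writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    if (writer.status == AVAssetWriterStatusWriting && input.readyForMoreMediaData) {
        [input appendSampleBuffer:sampleBuffer];
    }
}

To stop (e.g. in toggleRecording), mark the inputs finished and close both files (finishWritingWithCompletionHandler: requires iOS 6; on earlier systems the synchronous finishWriting does the same job):

[videoWriterInput_ markAsFinished];
[audioWriterInput_ markAsFinished];
[videoWriter_ finishWritingWithCompletionHandler:^{ NSLog(@"video file written"); }];
[audioWriter_ finishWritingWithCompletionHandler:^{ NSLog(@"audio file written"); }];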