
How can I reduce the file size of a .mov video created with AVCaptureSession?

I am able to record a video using AVCaptureSession, but I want to reduce its file size. How can I do this? I get the final URL in the delegate "captureOutput" method.

    NSError *error = nil;
    VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (VideoDevice)
    {
        VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
        if (!error)
        {
            if ([CaptureSession canAddInput:VideoInputDevice])
            {
                [CaptureSession addInput:VideoInputDevice];
            }
            else
            {
                NSLog(@"Couldn't add video input");
            }
        }
        else
        {
            NSLog(@"Couldn't create video input");
        }
    }
    else
    {
        NSLog(@"Couldn't create video capture device");
    }

    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
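
For reference, that final URL is delivered by the AVCaptureFileOutputRecordingDelegate callback. A minimal sketch, assuming recording is done through an AVCaptureMovieFileOutput and using a placeholder output path, of where a compression step like the answers below could be called:

    // AVCaptureFileOutputRecordingDelegate callback - the recorded .mov URL arrives here.
    - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
    {
        if (error) {
            NSLog(@"Recording failed: %@", error);
            return;
        }

        // Placeholder destination for the compressed copy.
        NSURL *compressedURL = [NSURL fileURLWithPath:
            [NSTemporaryDirectory() stringByAppendingPathComponent:@"compressed.mov"]];

        // Hand the raw recording to a compression step, e.g. the method shown below.
        [self compressingVideoWithSoundWithInputURL:outputFileURL
                                          outputURL:compressedURL
                                      andResolution:CGSizeMake(640, 480)];
    }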

I hope this method will be helpful for you:

- (void)compressingVideoWithSoundWithInputURL:(NSURL*)inputURL outputURL:(NSURL*)outputURL andResolution:(CGSize)resolution
{
    self.myVideoWriter = nil;

    [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];

    //setup video writer
    AVAsset *videoAsset = [[AVURLAsset alloc] initWithURL:inputURL options:nil];

    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    NSDictionary *videoWriterCompressionSettings =  [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:1250000], AVVideoAverageBitRateKey, nil];

    NSDictionary *videoWriterSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey, videoWriterCompressionSettings, AVVideoCompressionPropertiesKey, [NSNumber numberWithFloat:resolution.width], AVVideoWidthKey, [NSNumber numberWithFloat:resolution.height], AVVideoHeightKey, nil];

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoWriterSettings];

    videoWriterInput.expectsMediaDataInRealTime = YES;

    videoWriterInput.transform = videoTrack.preferredTransform;


    NSError* writerError = nil;

    self.myVideoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];

    if (writerError) {
        NSLog(@"Writer error: %@", writerError);
        NSString *message = [NSString stringWithFormat:@"VideoEditor. compressingVideoWithInputURL: outputURL: andResolution:. Writer error: %@", writerError];
        FLog(message);
        return;
    }



    [self.myVideoWriter addInput:videoWriterInput];

    //setup video reader
    NSDictionary *videoReaderSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    AVAssetReaderTrackOutput *videoReaderOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoReaderSettings];

    AVAssetReader *videoReader = [[AVAssetReader alloc] initWithAsset:videoAsset error:nil];

    [videoReader addOutput:videoReaderOutput];

    //setup audio writer
    AVAssetWriterInput* audioWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeAudio
                                            outputSettings:nil];

    audioWriterInput.expectsMediaDataInRealTime = NO;

    [self.myVideoWriter addInput:audioWriterInput];

    //setup audio reader
    AVAssetTrack* audioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];

    AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:videoAsset error:nil];

    [audioReader addOutput:audioReaderOutput];

    [self.myVideoWriter startWriting];

    //start writing from video reader
    [videoReader startReading];

    [self.myVideoWriter startSessionAtSourceTime:kCMTimeZero];

    dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue1", DISPATCH_QUEUE_SERIAL);

    [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:
     ^{

         while ([videoWriterInput isReadyForMoreMediaData]) {

             CMSampleBufferRef sampleBuffer;

             if ([videoReader status] == AVAssetReaderStatusReading &&
                 (sampleBuffer = [videoReaderOutput copyNextSampleBuffer])) {

                 [videoWriterInput appendSampleBuffer:sampleBuffer];
                 CFRelease(sampleBuffer);
             }

             else {

                 [videoWriterInput markAsFinished];

                 if ([videoReader status] == AVAssetReaderStatusCompleted) {
                     if ([audioReader status] == AVAssetReaderStatusReading || [audioReader status] == AVAssetReaderStatusCompleted) {

                     }
                     else{
                         //start writing from audio reader
                         [audioReader startReading];


                         dispatch_queue_t processingQueue = dispatch_queue_create("processingQueue2", NULL);

                         [audioWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{

                             while (audioWriterInput.readyForMoreMediaData) {

                                 CMSampleBufferRef sampleBuffer;

                                 if ([audioReader status] == AVAssetReaderStatusReading &&
                                     (sampleBuffer = [audioReaderOutput copyNextSampleBuffer])) {

                                     [audioWriterInput appendSampleBuffer:sampleBuffer];
                                     CFRelease(sampleBuffer);
                                 }
                                 else {

                                     [audioWriterInput markAsFinished];

                                     if ([audioReader status] == AVAssetReaderStatusCompleted)
                                     {
                                         [self finishWritingAction];
                                         break;
                                     }

                                 }
                             }

                         }];
                     }
                 }

             }
         }
     }];
}
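
The finishWritingAction helper called above is not defined in this answer; a minimal sketch of what it could do, assuming it only has to finalize the writer once both the video and audio inputs are marked as finished:

// Hypothetical helper: finalize the AVAssetWriter after video and audio are appended.
- (void)finishWritingAction
{
    [self.myVideoWriter finishWritingWithCompletionHandler:^{
        if (self.myVideoWriter.status == AVAssetWriterStatusCompleted) {
            NSLog(@"Compression finished, output at %@", self.myVideoWriter.outputURL);
        } else {
            NSLog(@"Compression failed: %@", self.myVideoWriter.error);
        }
        self.myVideoWriter = nil;
    }];
}

The two settings that actually shrink the file are AVVideoAverageBitRateKey and the resolution passed into the method; lower either to get a smaller output.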

You want to use an AVAssetExportSession. Your code will look something like this:

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:myAsset presetName:exportPreset];

exporter.videoComposition = _videoComposition;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = NO;
url = [url URLByAppendingPathExtension:CFBridgingRelease(UTTypeCopyPreferredTagWithClass((__bridge CFStringRef)(exporter.outputFileType), kUTTagClassFilenameExtension))];
exporter.outputURL = url;

[exporter exportAsynchronouslyWithCompletionHandler:^{
    // handle the completion in a way meaningful to your app
}];

Your export preset should be one of these:

AVF_EXPORT NSString *const AVAssetExportPreset640x480           NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset960x540           NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1280x720          NS_AVAILABLE(10_7, 4_0);
AVF_EXPORT NSString *const AVAssetExportPreset1920x1080         NS_AVAILABLE(10_7, 5_0);
AVF_EXPORT NSString *const AVAssetExportPreset3840x2160         NS_AVAILABLE(10_10, NA);
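
Before hard-coding a preset, you can ask AVAssetExportSession which presets are actually valid for the asset, and the completion handler should check the exporter's status. A rough sketch, where inputURL and outputURL are placeholders:

AVURLAsset *myAsset = [AVURLAsset URLAssetWithURL:inputURL options:nil];

// Only use a preset the asset actually supports; fall back to a quality-based one.
NSArray *compatible = [AVAssetExportSession exportPresetsCompatibleWithAsset:myAsset];
NSString *exportPreset = [compatible containsObject:AVAssetExportPreset640x480]
    ? AVAssetExportPreset640x480
    : AVAssetExportPresetMediumQuality;

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:myAsset
                                                                  presetName:exportPreset];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.outputURL = outputURL; // placeholder destination for the smaller .mov

[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export finished: %@", exporter.outputURL);
    } else {
        NSLog(@"Export failed: %@", exporter.error);
    }
}];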
