簡體   English   中英

AVAssetWriter和AVAssetWriterInputPixelBufferAdaptor appendingPixelBuffer組合失敗

[英]Failure of combination of AVAssetWriter and AVAssetWriterInputPixelBufferAdaptor appendingPixelBuffer

我試圖將官方提供的用於資產編寫器(AVAssetWriter)編碼的示例與 pixelBufferFromCGImage 的示例結合起來,以便將 UIImage 覆蓋在我要導出的 AVAsset 之上。

問題是,盡管此調用的結果為True

[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

導出的 AVAsset 損壞,顯示出意外的大小,並且隨後對其進行訪問失敗,並顯示「此媒體可能已損壞」。

如果我避免嘗試使用appendPixelBuffer調用,則導出本身可以成功完成。 但是,將其定位在分派隊列之前或之內會產生相同的故障。

希望這里不要多余地發布,但是堆棧溢出中的其他示例似乎並未解決這種特定的組合失敗。 謝謝,下面的代碼

導出代碼

// Fragment of the export routine (pre-ARC, manual retain/release).
// Re-encodes the video track of `sourceAsset` through an AVAssetWriter,
// attempting to prepend one still-image frame via a pixel-buffer adaptor,
// then passes the audio track through untouched.
 AVAsset *sourceAsset = [AVAsset assetWithURL:outputUrl];

NSError *error = nil;

// Unique output file name keyed by the current timestamp.
NSString *fileName = [NSString stringWithFormat:@"non_transform_%f.mov", [[NSDate date] timeIntervalSince1970]];
// NOTE(review): building a file URL by string concatenation breaks if the
// folder path contains characters needing percent-escaping;
// [NSURL fileURLWithPath:] would be safer — confirm against getFolderPath.
NSString *combinedPath = [NSString stringWithFormat:@"file://localhost%@/%@", [[GroupDiskManager sharedManager] getFolderPath], fileName];

NSURL *outputURL = [NSURL URLWithString:combinedPath];
NSLog(@"combined path: %@", combinedPath);

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error];


// Assumes the asset has at least one video and one audio track;
// objectAtIndex:0 throws if either is missing.
AVAssetTrack *videoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVAssetTrack *audioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];


// Re-encode video as H.264 at a fixed 1280x720.
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:1280], AVVideoWidthKey,
                               [NSNumber numberWithInt:720], AVVideoHeightKey,
                               nil];

// MRC: explicit retain — must be balanced by a release not visible in this
// fragment; otherwise the input leaks.
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput
                                         assetWriterInputWithMediaType:AVMediaTypeVideo
                                         outputSettings:videoSettings] retain];



// NOTE(review): the adaptor's pool is declared as bi-planar YUV (420v), yet
// pixelBufferFromCGImage fills its buffer through an RGB CGBitmapContext.
// Per the accepted answer, kCVPixelFormatType_32BGRA should be used instead —
// this format mismatch is the most likely source of the corrupted export.
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init];
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:1280] forKey:(NSString*)kCVPixelBufferWidthKey];
[attributes setObject:[NSNumber numberWithUnsignedInt:720] forKey:(NSString*)kCVPixelBufferHeightKey];


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:attributes];


NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
// Offline export: the writer may pull data as fast as it likes.
videoWriterInput.expectsMediaDataInRealTime = NO;
[videoWriter addInput:videoWriterInput];

NSError *aerror = nil;

// Decode the source video frames as 420v pixel buffers.
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];

AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:sourceAsset error:&aerror];
[reader addOutput:asset_reader_output];



// Audio is passed through without re-encoding (nil outputSettings).
AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput
                                         assetWriterInputWithMediaType:AVMediaTypeAudio
                                         outputSettings:nil] retain];
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:sourceAsset error:&error] retain];




AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
[audioReader addOutput:audioReaderOutput];
NSParameterAssert(audioWriterInput);
NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
audioWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:audioWriterInput];
// The session timeline starts at zero; every appended sample's presentation
// time is interpreted relative to this source time.
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
[reader startReading];


// NOTE(review): this still frame is appended at kCMTimeZero — the same
// timestamp the reader's first decoded sample will presumably carry. Two
// samples at one presentation time is another plausible cause of the
// "media may be damaged" corruption; confirm the intended timeline.
CVPixelBufferRef buffer = [ImageToMovieManager pixelBufferFromCGImage:[UIImage imageNamed:@"234_1280x720_3.jpg"].CGImage size:CGSizeMake(1280, 720)];
BOOL theResult = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

if (theResult == NO) // fails on 3GS, but works on iPhone 4
    NSLog(@"failed to append buffer");

if(buffer) {
    CVBufferRelease(buffer);
}




dispatch_queue_t _processingQueue = dispatch_queue_create("_processingQueue", NULL);
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock:
 ^{
     NSLog(@"requestMediaDataWhenReadyOnQueue");

     // MRC: keeps self alive while the block runs. No balancing release is
     // visible in this fragment — potential leak; verify at the call site.
     [self retain];

     // Drain decoded video samples into the writer while it accepts data.
     while ([videoWriterInput isReadyForMoreMediaData]) {



         CMSampleBufferRef sampleBuffer;
         if ([reader status] == AVAssetReaderStatusReading &&
             (sampleBuffer = [asset_reader_output copyNextSampleBuffer])) {

             BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
             CFRelease(sampleBuffer); // copyNextSampleBuffer returns +1

             if (!result) {
                 NSLog(@" result == nil Cancel!");
                 NSLog(@"videoWriter.error: %@", videoWriter.error);
                 [reader cancelReading];
                 break;

             }
         } else {
             NSLog(@"[videoWriterInput markAsFinished]");

             [videoWriterInput markAsFinished];

             switch ([reader status]) {
                 case AVAssetReaderStatusReading:
                     NSLog(@"reading");
                     // the reader has more for other tracks, even if this one is done
                     break;

                 case AVAssetReaderStatusCompleted:
                     NSLog(@"AVAssetReaderStatusCompleted");

                     // Video finished — now stream the audio track through
                     // on its own serial queue.
                     [audioReader startReading];
                     dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                     [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^
                      {

                          while (audioWriterInput.readyForMoreMediaData) {
                              CMSampleBufferRef nextBuffer;
                              if ([audioReader status] == AVAssetReaderStatusReading &&
                                  (nextBuffer = [audioReaderOutput copyNextSampleBuffer])) {
                                  if (nextBuffer) {
                                      // NOTE(review): nextBuffer is never CFRelease'd —
                                      // copyNextSampleBuffer returns a +1 reference,
                                      // so every audio sample leaks here.
                                      [audioWriterInput appendSampleBuffer:nextBuffer];
                                  }
                              }else{
                                  [audioWriterInput markAsFinished];
                                  switch ([audioReader status]) {
                                      case AVAssetReaderStatusCompleted:
                                          NSLog(@"AVAssetReaderStatusCompleted!!");
                                          // NOTE(review): -finishWriting is the old
                                          // synchronous API; newer code should use
                                          // -finishWritingWithCompletionHandler:.
                                          [videoWriter finishWriting];
                                          [VideoManager videoSavedWithURL:outputURL withError:(NSError *)error];
                                          break;
                                  }
                              }
                          }

                      }
                      ];
                     break;

                 case AVAssetReaderStatusFailed:
                     NSLog(@"AVAssetReaderStatusFailed");
                     [videoWriter cancelWriting];
                     break;
             }
             break;
         }
     }
 }

 ];

pixelBufferFromCGImage 的代碼

 CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image));
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithBool:NO], kCVPixelBufferCGImageCompatibilityKey,
                         [NSNumber numberWithBool:NO], kCVPixelBufferCGBitmapContextCompatibilityKey,
                         nil];
CVPixelBufferRef pxbuffer = NULL;
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width,
                                      frameSize.height,  kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, (CFDictionaryRef) options,
                                      &pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);


CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width,
                                             frameSize.height, 8, 4*frameSize.width, rgbColorSpace,
                                             kCGImageAlphaNoneSkipLast);

CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                       CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);

CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

return pxbuffer;

至少應將pixelFormat指定為kCVPixelFormatType_32BGRA,而不是kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange。

暫無
暫無

聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.

 
粵ICP備18138465號  © 2020-2024 STACKOOM.COM