
iOS AVAssetExportSession failed Code=-11820 only iPhone 5(c)

I want to export a video file from a composition that contains two videos (each with audio) and one separate audio track. It works fine on the iPhone 5s and newer, but fails on an iPhone 5c (iOS 9.2.1). The error is returned here:

[_assetExport exportAsynchronouslyWithCompletionHandler:^{
    if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
        [self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
    } else {
        NSLog(@"Export error: %@", _assetExport.error);
        [self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
    }
}];

The log it prints: Export error: Error Domain=AVFoundationErrorDomain Code=-11820 "Cannot Complete Export" UserInfo={NSLocalizedRecoverySuggestion=Try exporting again., NSLocalizedDescription=Cannot Complete Export}

As mentioned, it works fine on my iPhone 5s, 6, and 6s; only on my iPhone 5c does it return this error. Hopefully someone has experience with this.

The full code used to create the tracks and the composition:

- (void)generateVideoWithInputPath:(NSString *)inputVideo andAudioFileName:(NSString *)audioFileName andVolume:(float)volume {
    NSString *introVideoPath = [[NSBundle mainBundle] pathForResource:@"IntroVideo" ofType:@"mp4"];
    NSURL *introVideoUrl = [NSURL fileURLWithPath:introVideoPath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:inputVideo];

    self.outputAssetURL = nil;
    self.outputFilePath = finalVideoPath;
    NSURL *outputFileUrl = [NSURL fileURLWithPath:self.outputFilePath];
    unlink([self.outputFilePath UTF8String]); // remove existing result

    // Create composition
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Create asset for introVideo
    AVURLAsset *introVideoAsset = [[AVURLAsset alloc] initWithURL:introVideoUrl options:nil];

    // Create time ranges
    CMTime introStartTime = kCMTimeZero;
    CMTime introEndTime = introVideoAsset.duration;
    CMTimeRange introVideo_timeRange = CMTimeRangeMake(introStartTime, introEndTime);

    // Add video track of introVideo to composition
    NSArray *introVideoAssetTracks = [introVideoAsset tracksWithMediaType:AVMediaTypeVideo];
    AVAssetTrack *introVideoAssetTrack = ([introVideoAssetTracks count] > 0 ? [introVideoAssetTracks objectAtIndex:0] : nil);

    AVMutableCompositionTrack *b_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionVideoTrack insertTimeRange:introVideo_timeRange ofTrack:introVideoAssetTrack atTime:introStartTime error:nil];

    // Add audio track of introVideo to composition
    NSArray *audioAssetTracksIntro = [introVideoAsset tracksWithMediaType:AVMediaTypeAudio];
    AVAssetTrack *audioAssetTrackIntro = ([audioAssetTracksIntro count] > 0 ? [audioAssetTracksIntro objectAtIndex:0] : nil);
    AVMutableCompositionTrack *a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionAudioTrack insertTimeRange:introVideo_timeRange ofTrack:audioAssetTrackIntro atTime:introStartTime error:nil];

    // Create asset for inputVideo
    CMTime nextClipStartTime = introEndTime;
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];

    // Create time ranges
    CMTime videoStartTime = kCMTimeZero;
    CMTime videoEndTime = videoAsset.duration;
    if (CMTIME_IS_INVALID(videoEndTime)) {
        NSLog(@"videoEndTime is invalid");
    }
    CMTimeRange mainVideo_timeRange = CMTimeRangeMake(videoStartTime, videoEndTime);

    // Add video track of inputVideo to composition
    NSArray *videoAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
    AVAssetTrack *videoAssetTrack2 = ([videoAssetTracks2 count] > 0 ? [videoAssetTracks2 objectAtIndex:0] : nil);
    //    CMTime         audioDurationFix = CMTimeAdd(videoAsset.duration, CMTimeMakeWithSeconds(-1.0f, 1));
    //    CMTimeRange    video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    //    CMTimeRange    audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioDurationFix);

    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:mainVideo_timeRange ofTrack:videoAssetTrack2 atTime:nextClipStartTime error:nil];

    // Add audio track of inputVideo to composition (reuses the intro's audio composition track)
    NSArray *audioAssetTracks2 = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
    AVAssetTrack *audioAssetTrack2 = ([audioAssetTracks2 count] > 0 ? [audioAssetTracks2 objectAtIndex:0] : nil);
    //AVMutableCompositionTrack* a_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack2 atTime:nextClipStartTime error:nil];

    AVMutableAudioMix *audioMix = nil;

    if (audioFileName) {
        NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audioFileName];

        // Create asset for audio (song)
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];

        // Add audio of song to composition
        NSArray *audioAssetTracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
        AVAssetTrack *audioAssetTrack = ([audioAssetTracks count] > 0 ? [audioAssetTracks objectAtIndex:0] : nil);

        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [b_compositionAudioTrack insertTimeRange:mainVideo_timeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];

        // Set volume of song
        NSArray *tracksToDuck = [mixComposition tracksWithMediaType:AVMediaTypeAudio];
        NSMutableArray *trackMixArray = [NSMutableArray array];
        // for (int i = 0; i < [tracksToDuck count]; i++) {
        AVAssetTrack *leTrack = [tracksToDuck objectAtIndex:0];
        AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack];
        [trackMix setVolume:1 atTime:kCMTimeZero];
        [trackMixArray addObject:trackMix];

        AVAssetTrack *leTrack2 = [tracksToDuck objectAtIndex:1];
        AVMutableAudioMixInputParameters *trackMix2 = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:leTrack2];
        [trackMix2 setVolume:volume atTime:kCMTimeZero];
        [trackMixArray addObject:trackMix2];
        // }

        audioMix = [AVMutableAudioMix audioMix];
        audioMix.inputParameters = trackMixArray;
    }

    // Export composition to video file
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie; //@"com.apple.quicktime-movie";
    _assetExport.outputURL = outputFileUrl;
    _assetExport.videoComposition = [self getVideoComposition:videoAsset intro:introVideoAsset composition:mixComposition];
    // Set song volume audio
    if (audioMix != nil) {
        _assetExport.audioMix = audioMix;
    }
    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == _assetExport.status) {
            [self performSelectorOnMainThread:@selector(videoIsDone) withObject:nil waitUntilDone:YES];
        } else {
            NSLog(@"Export error: %@", _assetExport.error);
            [self performSelectorOnMainThread:@selector(videoHasFailed) withObject:nil waitUntilDone:YES];
        }
    }];
}

- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset intro:(AVAsset *)intro composition:(AVMutableComposition *)composition {

  AVMutableCompositionTrack *compositionIntroTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

  AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

  NSArray *introVideoTracksArray = [intro tracksWithMediaType:AVMediaTypeVideo];
  AVAssetTrack *introTrack = nil;
  if (introVideoTracksArray.count > 0) {
    introTrack = [introVideoTracksArray objectAtIndex:0];
    [compositionIntroTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, intro.duration) ofTrack:introTrack atTime:kCMTimeZero error:nil];
  }

  NSArray *videoTracksArray = [asset tracksWithMediaType:AVMediaTypeVideo];
  AVAssetTrack *videoTrack = nil;
  if (videoTracksArray.count > 0) {
    videoTrack = [videoTracksArray objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:intro.duration error:nil];
  }

  AVMutableVideoCompositionLayerInstruction *firstLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionIntroTrack];

  AVMutableVideoCompositionLayerInstruction *secondLayerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];

  CGSize videoSize;
  if (videoTrack && introTrack) {
    CGSize trackDimensions = [videoTrack naturalSize];
    // turn around for portrait
    if (trackDimensions.height>trackDimensions.width) {
        videoSize = CGSizeMake(trackDimensions.width, trackDimensions.height);
    } else {
        videoSize = CGSizeMake(trackDimensions.height, trackDimensions.width);
    }

    CGAffineTransform transform = videoTrack.preferredTransform;
    CGAffineTransform scale = CGAffineTransformMakeScale((videoSize.width/introTrack.naturalSize.width),(videoSize.height/introTrack.naturalSize.height));
    [firstLayerInst setTransform:scale atTime:kCMTimeZero];
    [secondLayerInst setTransform:transform atTime:kCMTimeZero];
  } else {
    videoSize = [[FilteringClass sharedFilteringClass] getVideoSize];
  }

  CMTime totalTime = CMTimeAdd(asset.duration, intro.duration);
  NSLog(@"Total videotime: %lld", totalTime.value);

  AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
  inst.timeRange = CMTimeRangeMake(kCMTimeZero, totalTime);
  inst.layerInstructions = [NSArray arrayWithObjects:firstLayerInst, secondLayerInst, nil];


  AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
  videoComposition.instructions = [NSArray arrayWithObject:inst];

  videoComposition.renderSize = videoSize;
  videoComposition.frameDuration = CMTimeMake(1, 30);
  videoComposition.renderScale = 1.0;
  return videoComposition;
}

I think you are hitting the decoder limit set in AVFoundation. In iOS 5 the decoder limit was 4; in iOS 6 it is 16. Try exporting a smaller video: if that works, the problem lies with your video files, which probably exceed the decoder limit.
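If the decoder limit is indeed the cause, note that the code above decodes more tracks than it needs to: generateVideoWithInputPath: creates two video and up to three audio tracks, and getVideoComposition:intro:composition: then adds two more video tracks and inserts the same footage again. One way to stay under the limit is to append both clips to a single composition video track and a single audio track. Below is a minimal sketch of that approach; it is not your original code, and BuildSequentialComposition is a hypothetical helper name:

#import <AVFoundation/AVFoundation.h>

// Sketch: append both clips to ONE video track and ONE audio track, so the
// export only needs one video and one audio decoder instead of one per inserted track.
static AVMutableComposition *BuildSequentialComposition(AVAsset *intro, AVAsset *main, NSError **error) {
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *videoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime cursor = kCMTimeZero;
    for (AVAsset *asset in @[intro, main]) {
        CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
        AVAssetTrack *v = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        AVAssetTrack *a = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        // Pass a real NSError out instead of error:nil, so failed inserts are visible.
        if (v && ![videoTrack insertTimeRange:range ofTrack:v atTime:cursor error:error]) {
            return nil;
        }
        if (a && ![audioTrack insertTimeRange:range ofTrack:a atTime:cursor error:error]) {
            return nil;
        }
        cursor = CMTimeAdd(cursor, asset.duration);
    }
    return composition;
}

It can also be worth logging [AVAssetExportSession exportPresetsCompatibleWithAsset:composition] on the failing device, to confirm that AVAssetExportPresetHighestQuality is actually available for this composition on the iPhone 5c.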
