iOS – AVAssetExportSession can only export up to 8 tracks after the composition has been played in AVPlayer

I am trying to loop some segments of a recorded video and merge them into a single video. I have successfully merged and exported a composition with up to 16 tracks. But when I play that composition with AVPlayer before merging, I can only export up to 8 tracks.

First, I create the AVComposition and AVVideoComposition:

    +(void)previewUserClipDanceWithAudio:(NSURL*)videoURL audioURL:(NSURL*)audioFile loop:(NSArray*)loopTime slowMotion:(NSArray*)slowFactor showInViewController:(UIViewController*)viewController completion:(void(^)(BOOL success, AVVideoComposition* videoComposition, AVComposition* composition))completion{
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
        NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];
        AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioFile options:nil];
        //NSLog(@"audio File %@",audioFile);
        CMTime duration = kCMTimeZero;
        AVAsset *currentAsset = [AVAsset assetWithURL:videoURL];
        BOOL isCurrentAssetPortrait = YES;

        for (NSInteger i = 0; i < [loopTime count]; i++) {
            // handle loop-time array
            NSInteger loopDur = [[loopTime objectAtIndex:i] intValue];
            NSInteger value = labs(loopDur);
            //NSLog(@"loopInfo %d value %d",loopInfo,value);

            // handle slow-motion array
            double slowInfo = [[slowFactor objectAtIndex:i] doubleValue];
            double videoScaleFactor = fabs(slowInfo);

            // every loop iteration adds a fresh video track and audio track to the composition
            AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

            if (i == 0) {
                [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration)
                                      ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                       atTime:duration
                                        error:nil];
                [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration)
                                    ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                     atTime:duration
                                      error:nil];
            } else {
                // insert the last `value` tenths of a second of the clip
                [currentTrack insertTimeRange:CMTimeRangeMake(CMTimeSubtract(currentAsset.duration, CMTimeMake(value, 10)), CMTimeMake(value, 10))
                                      ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                       atTime:duration
                                        error:nil];
                if (videoScaleFactor == 1) {
                    [audioTrack insertTimeRange:CMTimeRangeMake(CMTimeSubtract(currentAsset.duration, CMTimeMake(value, 10)), CMTimeMake(value, 10))
                                        ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                         atTime:duration
                                          error:nil];
                }
                // slow motion here
                if (videoScaleFactor != 1) {
                    [currentTrack scaleTimeRange:CMTimeRangeMake(CMTimeSubtract(currentAsset.duration, CMTimeMake(value, 10)), CMTimeMake(value, 10))
                                      toDuration:CMTimeMake(value * videoScaleFactor, 10)];
                    NSLog(@"slowmo %f", value * videoScaleFactor);
                }
            }

            AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
            AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            //CGFloat assetScaleToFitRatio;
            //assetScaleToFitRatio = [self getScaleToFitRatioCurrentTrack:currentTrack];

            if (isCurrentAssetPortrait) {
                //NSLog(@"portrait");
                if (slowInfo < 0) {
                    CGRect screenRect = [[UIScreen mainScreen] bounds];
                    CGFloat ratio = screenRect.size.height / screenRect.size.width;
                    // we have to adjust the ratio for 16:9 screens
                    if (ratio == 1.775) ratio = 1.77777777777778;
                    CGFloat complimentSize = (currentAssetTrack.naturalSize.height * ratio);
                    CGFloat tx = (currentAssetTrack.naturalSize.width - complimentSize) / 2;
                    // invert translation because of portrait
                    tx *= -1;
                    // t1: rotate and position video since it may have been cropped to screen ratio
                    CGAffineTransform t1 = CGAffineTransformTranslate(currentAssetTrack.preferredTransform, tx, 0);
                    // t2/t3: mirror video vertically
                    CGAffineTransform t2 = CGAffineTransformTranslate(t1, currentAssetTrack.naturalSize.width, 0);
                    CGAffineTransform t3 = CGAffineTransformScale(t2, -1, 1);
                    [currentAssetLayerInstruction setTransform:t3 atTime:duration];
                } else if (loopDur < 0) {
                    CGRect screenRect = [[UIScreen mainScreen] bounds];
                    CGFloat ratio = screenRect.size.height / screenRect.size.width;
                    // we have to adjust the ratio for 16:9 screens
                    if (ratio == 1.775) ratio = 1.77777777777778;
                    CGFloat complimentSize = (currentAssetTrack.naturalSize.height * ratio);
                    CGFloat tx = (currentAssetTrack.naturalSize.width - complimentSize) / 2;
                    // invert translation because of portrait
                    tx *= -1;
                    // t1: rotate and position video since it may have been cropped to screen ratio
                    CGAffineTransform t1 = CGAffineTransformTranslate(currentAssetTrack.preferredTransform, tx, 0);
                    // t2/t3: mirror video horizontally
                    CGAffineTransform t2 = CGAffineTransformTranslate(t1, 0, currentAssetTrack.naturalSize.height);
                    CGAffineTransform t3 = CGAffineTransformScale(t2, 1, -1);
                    [currentAssetLayerInstruction setTransform:t3 atTime:duration];
                } else {
                    [currentAssetLayerInstruction setTransform:currentAssetTrack.preferredTransform atTime:duration];
                }
            } else {
                // CGFloat translateAxisX = (currentTrack.naturalSize.width > MAX_WIDTH) ? (0.0) : 0.0; // if use <, 640 video will be moved left by 10px. (float)(MAX_WIDTH - currentTrack.naturalSize.width)/(float)4.0
                // CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(assetScaleToFitRatio, assetScaleToFitRatio);
                // [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(translateAxisX, 0)) atTime:duration];
            }

            if (i == 0) {
                duration = CMTimeAdd(duration, currentAsset.duration);
            } else {
                if (videoScaleFactor != 1) {
                    duration = CMTimeAdd(duration, CMTimeMake(value * videoScaleFactor, 10));
                } else {
                    duration = CMTimeAdd(duration, CMTimeMake(value, 10));
                }
            }
            [currentAssetLayerInstruction setOpacity:0.0 atTime:duration];
            [arrayInstruction addObject:currentAssetLayerInstruction];
        }

        // background music track, aligned so it ends together with the video
        AVMutableCompositionTrack *AudioBGTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        [AudioBGTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration)
                              ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                               atTime:CMTimeSubtract(duration, audioAsset.duration)
                                error:nil];

        videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
        videoCompositionInstruction.layerInstructions = arrayInstruction;

        CGSize naturalSize;
        if (isCurrentAssetPortrait) {
            naturalSize = CGSizeMake(MAX_HEIGHT, MAX_WIDTH); //currentAssetTrack.naturalSize.height, currentAssetTrack.naturalSize.width
        } else {
            naturalSize = CGSizeMake(MAX_WIDTH, MAX_HEIGHT); //currentAssetTrack.naturalSize
        }

        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.instructions = [NSArray arrayWithObject:videoCompositionInstruction];
        videoComposition.frameDuration = CMTimeMake(1, 30);
        videoComposition.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
        NSLog(@"prepared");

        AVVideoComposition *composition = [videoComposition copy];
        AVComposition *mixedComposition = [mixComposition copy];
        completion(YES, composition, mixedComposition);
    }
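
For context, this is roughly how the builder above gets called (a minimal sketch; `ClipComposer` is a placeholder name for the class that declares the method, and `videoURL`, `beatURL` and the two arrays are example values, not taken from the original code):

    NSArray *loopTime   = @[@0, @15, @-20, @10];     // segment lengths in tenths of a second
    NSArray *slowFactor = @[@1.0, @1.0, @2.0, @1.0]; // 1.0 = normal speed, 2.0 = half speed
    [ClipComposer previewUserClipDanceWithAudio:videoURL
                                       audioURL:beatURL
                                           loop:loopTime
                                     slowMotion:slowFactor
                           showInViewController:self
                                     completion:^(BOOL success, AVVideoComposition *videoComposition, AVComposition *composition) {
        if (success) {
            // hand both objects to the preview player shown below
            [self playVideoWithComposition:videoComposition inMutableComposition:composition];
        }
    }];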

Then I set up the AVPlayer:

    -(void)playVideoWithComposition:(AVVideoComposition*)videoComposition inMutableComposition:(AVComposition*)composition{
        MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:self.view animated:YES];
        hud.label.text = myLanguage(@"kMergeClip");
        savedComposition = [composition copy];
        savedVideoComposition = [videoComposition copy];

        playerItem = [AVPlayerItem playerItemWithAsset:composition];
        playerItem.videoComposition = videoComposition;
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(repeatVideo:)
                                                     name:AVPlayerItemDidPlayToEndTimeNotification
                                                   object:playerItem];
        if (!player) {
            player = [AVPlayer playerWithPlayerItem:playerItem];
            layer = [AVPlayerLayer playerLayerWithPlayer:player];
            layer.frame = [UIScreen mainScreen].bounds;
            [self.ibPlayerView.layer insertSublayer:layer atIndex:0];
            NSLog(@"create new player");
        }
        if (player.currentItem != playerItem) {
            [player replaceCurrentItemWithPlayerItem:playerItem];
        }
        player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
        //[player seekToTime:kCMTimeZero];
        [playerItem addObserver:self
                     forKeyPath:@"status"
                        options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                        context:@"AVPlayerStatus"];
    }
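
The `status` observer handler itself isn't shown above; for completeness, here is a sketch of what it and the matching teardown might look like (both methods are illustrative assumptions, not part of the original code):

    - (void)observeValueForKeyPath:(NSString *)keyPath
                          ofObject:(id)object
                            change:(NSDictionary *)change
                           context:(void *)context {
        if (object == playerItem && [keyPath isEqualToString:@"status"]) {
            if (playerItem.status == AVPlayerItemStatusReadyToPlay) {
                [MBProgressHUD hideHUDForView:self.view animated:YES];
                [player play];
            } else if (playerItem.status == AVPlayerItemStatusFailed) {
                NSLog(@"Preview failed: %@", playerItem.error);
            }
            return;
        }
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }

    // Hypothetical teardown, called before leaving the preview (and before exporting)
    // so the old player item stops holding decoder resources.
    - (void)tearDownPreviewItem {
        [[NSNotificationCenter defaultCenter] removeObserver:self
                                                        name:AVPlayerItemDidPlayToEndTimeNotification
                                                      object:playerItem];
        [playerItem removeObserver:self forKeyPath:@"status"];
        [player replaceCurrentItemWithPlayerItem:nil];
        playerItem = nil;
    }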

When the user has previewed all the clips they want and taps Save, I export with this method:

    +(void)mergeUserCLip:(AVVideoComposition*)videoComposition withAsset:(AVComposition*)mixComposition showInViewController:(UIViewController*)viewController completion:(void(^)(BOOL success, NSURL *fileURL))completion{
        MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:viewController.view animated:YES];
        hud.mode = MBProgressHUDModeDeterminateHorizontalBar;
        hud.label.text = myLanguage(@"kMergeClip");

        //Name merged clip using the beat name
        //NSString* beatName = [[[NSString stringWithFormat:@"%@",audioFile] lastPathComponent] stringByDeletingPathExtension];
        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsDirectory = [paths objectAtIndex:0];
        NSString *tmpDir = [[documentsDirectory stringByDeletingLastPathComponent] stringByAppendingPathComponent:@"tmp"];
        NSString *myPathDocs = [tmpDir stringByAppendingPathComponent:[NSString stringWithFormat:@"merge-beat.mp4"]];
        //Not removed here, it will be removed when previewPlayVC is shown
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];

        // 1 - set up the overlay
        CALayer *overlayLayer = [CALayer layer];
        UIImage *overlayImage = [UIImage imageNamed:@"watermark.png"];
        [overlayLayer setContents:(id)[overlayImage CGImage]];
        overlayLayer.frame = CGRectMake(720-221, 1280-109, 181, 69);
        [overlayLayer setMasksToBounds:YES];
        // aLayer = [CALayer layer];
        // [aLayer addSublayer:labelLogo.layer];
        // aLayer.frame = CGRectMake(MAX_WIDTH - labelLogo.width - 10.0, MAX_HEIGHT - 50.0, 20.0, 20.0);
        // aLayer.opacity = 1;

        // 2 - set up the parent layer
        CALayer *parentLayer = [CALayer layer];
        CALayer *videoLayer = [CALayer layer];
        parentLayer.frame = CGRectMake(0, 0, MAX_HEIGHT, MAX_WIDTH);
        videoLayer.frame = CGRectMake(0, 0, MAX_HEIGHT, MAX_WIDTH);
        [parentLayer addSublayer:videoLayer];
        [parentLayer addSublayer:overlayLayer];

        // 3 - apply magic
        // mutableCopy (rather than copy) so that animationTool can be set on the result
        AVMutableVideoComposition *mutableVideoComposition = [videoComposition mutableCopy];
        mutableVideoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

        NSURL *url = [NSURL fileURLWithPath:myPathDocs];
        myLog(@"Path: %@", myPathDocs);
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
        exporter.outputURL = url;
        exporter.outputFileType = AVFileTypeMPEG4;
        exporter.videoComposition = mutableVideoComposition;
        exporter.shouldOptimizeForNetworkUse = NO;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            //NSLog(@"exporting");
            switch (exporter.status) {
                case AVAssetExportSessionStatusCompleted: {
                    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
                    hud.progress = 1.0f;
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [MBProgressHUD hideHUDForView:viewController.view animated:YES];
                    });
                    [self checkTmpSize];
                    if (completion) {
                        completion(YES, url);
                    }
                    break;
                }
                case AVAssetExportSessionStatusExporting:
                    myLog(@"Exporting!");
                    break;
                case AVAssetExportSessionStatusWaiting:
                    myLog(@"Waiting");
                    break;
                default:
                    break;
            }
        }];
    }
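
As an aside on where the progress reading below comes from: the HUD is determinate, so the export progress has to be polled while the session runs. One way that might look (a sketch only; the timer, which needs the iOS 10+ block-based API, and the failure logging are assumptions, not part of the method above, and logging exporter.error when the status is Failed is usually the quickest way to see why a session never starts):

    // Illustrative only: poll exporter.progress on the main run loop to drive the HUD,
    // and log exporter.error if the session fails instead of completing.
    [NSTimer scheduledTimerWithTimeInterval:0.1 repeats:YES block:^(NSTimer *timer) {
        hud.progress = exporter.progress;
        if (exporter.status == AVAssetExportSessionStatusCompleted ||
            exporter.status == AVAssetExportSessionStatusFailed ||
            exporter.status == AVAssetExportSessionStatusCancelled) {
            NSLog(@"export ended with status %ld, error: %@", (long)exporter.status, exporter.error);
            [timer invalidate];
        }
    }];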

The code above works fine if fewer than 8 loops are selected. If more than 8 are selected, the export session freezes, with the export progress stuck at 0.0000000. If I remove this line:

  playerItem.videoComposition = videoComposition; 

then I can no longer preview the merged video, but it exports fine (up to 16 tracks).

Alternatively, if I remove this line from the export code:

  exporter.videoComposition = mutableVideoComposition; 

then I can preview the merged video, and it exports fine, just without the video composition applied.

So I suspect there is something wrong with AVVideoComposition and/or the way I am using it.

I would appreciate any suggestions. Thanks a lot.

I strongly suspect that previewing the video with AVPlayer is somehow getting in the way of AVAssetExportSession, as described in these posts:

iOS 5: Error merging 3 videos with AVAssetExportSession

AVPlayerItem fails with AVStatusFailed and error code "Cannot Decode"

I ran into this problem while trying to concatenate N videos and, at the same time, play 3 of them with AVPlayer instances inside a UICollectionView. As discussed in the Stack Overflow question you linked, iOS can only handle a limited number of AVPlayer instances; each instance uses up one of the "render pipelines". I found that each AVMutableCompositionTrack instance also uses one of these render pipelines.

So if you use too many AVPlayer instances, or build a composition with too many AVMutableCompositionTrack tracks, you can run out of resources to decode H.264 and you will get the "Cannot Decode" error. I was able to work around this by using only two AVMutableCompositionTrack instances. That way I could still "overlap" video segments and apply transitions (which require two video tracks "playing" at the same time).

In short: minimize the number of AVMutableCompositionTrack and AVPlayer instances you create. You can check out Apple's AVCustomEdit sample code for an example of this. Specifically, look at the buildTransitionComposition method in the APLSimpleEditor class.
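
To illustrate the idea, here is a stripped-down sketch of the two-track approach, similar in spirit to buildTransitionComposition in AVCustomEdit (`clips` is an assumed NSArray of AVAssets; audio and the transition instructions are omitted):

    // Sketch: reuse just two video tracks and alternate clip segments between them,
    // instead of creating one AVMutableCompositionTrack per clip.
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *videoTracks[2];
    videoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                               preferredTrackID:kCMPersistentTrackID_Invalid];
    videoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                               preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime cursor = kCMTimeZero;
    for (NSUInteger i = 0; i < clips.count; i++) {                      // clips: NSArray<AVAsset *>
        AVAsset *clip = clips[i];
        AVAssetTrack *sourceTrack = [[clip tracksWithMediaType:AVMediaTypeVideo] firstObject];
        AVMutableCompositionTrack *destinationTrack = videoTracks[i % 2]; // alternate 0, 1, 0, 1, ...
        [destinationTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, clip.duration)
                                  ofTrack:sourceTrack
                                   atTime:cursor
                                    error:nil];
        cursor = CMTimeAdd(cursor, clip.duration);
    }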

Try this: clear the player item before exporting

 [self.player replaceCurrentItemWithPlayerItem:nil];
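
In other words, release the preview item's hold on the decoder before you create the export session, roughly like this (a sketch; the property names are whatever you already use):

    [self.player pause];
    [self.player replaceCurrentItemWithPlayerItem:nil]; // frees the preview's render pipeline
    self.playerItem = nil;
    // ...now build the AVAssetExportSession and call exportAsynchronouslyWithCompletionHandler: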