Merging two video files in an iPhone application

In one of my applications I need to add some images into a video. So I cut the video into two parts and made a third video from the images. Now I want to combine these three video files into a single one, but I have no idea how to join them. I have seen some code here, but it didn't help me. I split the video and built the video from the images with the code below; now I need code to merge all of these videos.

Any other ideas for putting the current view's screen between the video files are also welcome.
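If the goal is to show the current view between two clips, one approach is to snapshot the view into a UIImage and push that image through the image-to-video code below. A minimal sketch, assuming it runs on the main thread inside the view controller (the method name imageFromCurrentView is my own, and -renderInContext: needs QuartzCore imported):

    #import <QuartzCore/QuartzCore.h>

    // Render the view controller's current view into a UIImage that the
    // image-to-video code below can turn into frames.
    - (UIImage *)imageFromCurrentView
    {
        UIGraphicsBeginImageContextWithOptions(self.view.bounds.size, YES, 0.0);
        [self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return snapshot;
    }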

Splitting the video file

    NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Affagogato" ofType:@"mp4"]];
    AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];

    for (int i = 0; i < 2; i++) {
        AVAssetExportSession *exportSession =
            [[AVAssetExportSession alloc] initWithAsset:anAsset
                                             presetName:AVAssetExportPresetLowQuality];

        // Pick an output path in the temporary directory that is not taken yet.
        // The clip index i is part of the name so the two asynchronous exports
        // cannot race each other into the same file.
        NSString *filePath = nil;
        NSUInteger count = 0;
        do {
            NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%lu", (unsigned long)count] : @"";
            filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:
                           [NSString stringWithFormat:@"Output-%d%@.mov", i, numberString]];
            count++;
        } while ([[NSFileManager defaultManager] fileExistsAtPath:filePath]);

        exportSession.outputURL = [NSURL fileURLWithPath:filePath];
        exportSession.outputFileType = AVFileTypeQuickTimeMovie;

        // First pass exports seconds 0-10, second pass exports the rest of the asset.
        CMTimeRange range;
        if (i == 0) {
            CMTime start = CMTimeMakeWithSeconds(0.0, 600);
            CMTime duration = CMTimeMakeWithSeconds(10.0, 600);
            range = CMTimeRangeMake(start, duration);
        } else {
            CMTime start = CMTimeMakeWithSeconds(10.0, 600);
            range = CMTimeRangeMake(start, CMTimeSubtract(anAsset.duration, start));
        }
        exportSession.timeRange = range;

        [exportSession exportAsynchronouslyWithCompletionHandler:^{
            dispatch_async(dispatch_get_main_queue(), ^{
                [self exportDidFinish:exportSession Tag:i];
            });
        }];
    }
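The loop above hands each finished export to an exportDidFinish:Tag: helper that is not shown in the question. A minimal sketch of what it might look like, assuming videoPathArray is the NSMutableArray the later snippets read the clip paths from:

    - (void)exportDidFinish:(AVAssetExportSession *)exportSession Tag:(int)tag
    {
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            // Remember the exported clip so it can be merged afterwards.
            [videoPathArray addObject:exportSession.outputURL.path];
        } else {
            NSLog(@"Export %d failed: %@", tag, exportSession.error);
        }
    }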

Making a video from images

    CGRect rect = CGRectMake(0, 0, 320, 480);
    view = [[UIView alloc] initWithFrame:rect];

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *path = [documentsDirectory stringByAppendingPathComponent:@"video2.mov"];
    CGSize size = self.view.frame.size;

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput =
        [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoSettings] retain];

    // Hand the buffer attributes to the adaptor; otherwise its pixelBufferPool stays nil.
    NSDictionary *attributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB], (NSString *)kCVPixelBufferPixelFormatTypeKey,
                                [NSNumber numberWithUnsignedInt:320], (NSString *)kCVPixelBufferWidthKey,
                                [NSNumber numberWithUnsignedInt:480], (NSString *)kCVPixelBufferHeightKey,
                                nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                         sourcePixelBufferAttributes:attributes];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // First frame at time zero.
    xPixel = 0;
    yPixel = 250;
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVBufferRelease(buffer);

    // Two frames per image: image 1 around second 0, image 2 around second 1.
    for (int i = 0; i < 2; i++) {
        for (int pframetime = 1; pframetime <= 2; pframetime++) {
            CMTime frameTime = CMTimeMake(pframetime, 25);
            CMTime lastTime = CMTimeMake(i, 1);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);

            if (i == 0)
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]];
            else
                buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"2.jpeg"] CGImage]];

            // Block until the writer input can take another buffer.
            while (![writerInput isReadyForMoreMediaData]) {
                [NSThread sleepForTimeInterval:0.05];
            }
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            CVBufferRelease(buffer);
        }
    }

    [writerInput markAsFinished];
    [videoWriter finishWriting];
    [videoPathArray addObject:path];

    // Finish the session:
    [videoWriter release];
    [writerInput release];
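The writer code relies on a pixelBufferFromCGImage: helper that is not shown either. A common implementation looks roughly like this sketch; the 320x480 frame size matches the adaptor attributes above, and xPixel/yPixel are assumed to be the instance variables the caller sets just before the first frame:

    - (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
    {
        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGImageCompatibilityKey,
                                 [NSNumber numberWithBool:YES], (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey,
                                 nil];
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, 320, 480,
                                              kCVPixelFormatType_32ARGB,
                                              (CFDictionaryRef)options, &pxbuffer);
        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata, 320, 480, 8,
                                                     CVPixelBufferGetBytesPerRow(pxbuffer),
                                                     rgbColorSpace,
                                                     kCGImageAlphaNoneSkipFirst);
        // Draw the image at the offset the caller configured via the
        // xPixel/yPixel instance variables.
        CGContextDrawImage(context, CGRectMake(xPixel, yPixel,
                                               CGImageGetWidth(image),
                                               CGImageGetHeight(image)), image);
        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        return pxbuffer; // the caller releases it with CVBufferRelease
    }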

To merge the video files I tried the code below, but it didn't work: I was getting blank screens between some of the videos.

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSString *video_inputFilePath1 = [videoPathArray objectAtIndex:1];
    NSURL *video_inputFileUrl1 = [NSURL fileURLWithPath:video_inputFilePath1];
    NSString *video_inputFilePath2 = [videoPathArray objectAtIndex:0];
    NSURL *video_inputFileUrl2 = [NSURL fileURLWithPath:video_inputFilePath2];
    NSString *video_inputFilePath3 = [videoPathArray objectAtIndex:2];
    NSURL *video_inputFileUrl3 = [NSURL fileURLWithPath:video_inputFilePath3];

    NSString *outputFileName = @"outputFile.mov";
    NSString *outputFilePath = [NSString stringWithFormat:@"%@/%@", documentsDirectoryPath, outputFileName];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    AVURLAsset *videoAsset1 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl1 options:nil];
    AVURLAsset *videoAsset2 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl2 options:nil];
    AVURLAsset *videoAsset3 = [[AVURLAsset alloc] initWithURL:video_inputFileUrl3 options:nil];

    AVMutableCompositionTrack *a_compositionVideoTrack1 =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

    // Each clip must be inserted at the accumulated end time of the clips
    // before it, or the composition ends up with gaps or overlaps.
    CMTime nextClipStartTime = kCMTimeZero;

    [a_compositionVideoTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset1.duration)
                                      ofTrack:[[videoAsset1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                       atTime:nextClipStartTime
                                        error:nil];
    nextClipStartTime = CMTimeAdd(nextClipStartTime, videoAsset1.duration);

    [a_compositionVideoTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset3.duration)
                                      ofTrack:[[videoAsset3 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                       atTime:nextClipStartTime
                                        error:nil];
    nextClipStartTime = CMTimeAdd(nextClipStartTime, videoAsset3.duration);

    [a_compositionVideoTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset2.duration)
                                      ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                       atTime:nextClipStartTime
                                        error:nil];

    AVAssetExportSession *_assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetLowQuality];
    _assetExport.shouldOptimizeForNetworkUse = YES;
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    // The export has to be started, or nothing is ever written to outputFileUrl.
    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"Merge export finished with status %d, error %@", (int)_assetExport.status, _assetExport.error);
    }];
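With more clips the same pattern is easier to keep correct as a loop: insert each clip at a running cursor and advance the cursor by that clip's duration, so no gaps (the blank screens) or overlaps can creep in. A minimal sketch, assuming videoPathArray already holds the clip paths in playback order:

    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableCompositionTrack *videoTrack =
        [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                 preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTime cursor = kCMTimeZero;
    for (NSString *clipPath in videoPathArray) {
        AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:clipPath] options:nil];
        AVAssetTrack *srcTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        // Insert the whole clip at the cursor, then move the cursor past it.
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                            ofTrack:srcTrack
                             atTime:cursor
                              error:nil];
        cursor = CMTimeAdd(cursor, asset.duration);
    }
    // Export `composition` with an AVAssetExportSession exactly as above.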

Try watching the video called "Working with Media in AV Foundation" on the Apple developer portal. It shows you how to do what you are describing.

https://developer.apple.com/videos/wwdc/2011/