How to display a video made from images

#import "ViewController.h" #import "AVFoundation/AVAssetWriter.h" #import "AVFoundation/AVAssetWriterInput.h" #import "AVFoundation/AVMediaFormat.h" #import "AVFoundation/AVVideoSettings.h" @implementation ViewController - (void)viewDidLoad { [super viewDidLoad]; NSArray *documentDirectories = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); NSString *documentDirectory = [documentDirectories objectAtIndex: 0]; // NSLog(@"Where it is %@ \n",documentDirectory); image1.image = [UIImage imageNamed:@"images1.jpg"]; CGSize sizeOfImage = image1.image.size; // printf("Size of Image width = %f height = %f\n", sizeOfImage.width,sizeOfImage.height); [self writeImageAsMovie:(UIImage*)image1.image toPath:(NSString*)documentDirectory size:(CGSize)sizeOfImage duration:6]; } - (void)writeImageAsMovie:(UIImage*)image toPath:(NSString*)path size:(CGSize)size duration:(int)duration { NSError *error = nil; AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:&error]; NSParameterAssert(videoWriter); NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:size.width], AVVideoWidthKey, [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil]; AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil]; NSParameterAssert(writerInput); NSParameterAssert([videoWriter canAddInput:writerInput]); [videoWriter addInput:writerInput]; //Start a session: [videoWriter startWriting]; [videoWriter startSessionAtSourceTime:kCMTimeZero]; //Write samples: //CVPixelBufferRef Utils; CVPixelBufferRef buffer = [self newPixelBufferFromCGImage:image.CGImage size:size]; [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration-1, 2)]; //Finish the session: [writerInput markAsFinished]; [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 2)]; [videoWriter finishWriting]; } -(CVPixelBufferRef) newPixelBufferFromCGImage:(CGImageRef)image size:(CGSize )frameSize { NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil]; CVPixelBufferRef pxbuffer = NULL; CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width, frameSize.height, kCVPixelFormatType_32ARGB,(__bridge CFDictionaryRef)options,&pxbuffer); NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); status = CVPixelBufferLockBaseAddress(pxbuffer, 0); void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); NSParameterAssert(pxdata != NULL); CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width, frameSize.height, 8, 4*frameSize.width, rgbColorSpace, kCGImageAlphaNoneSkipFirst); NSParameterAssert(context); CGAffineTransform frameTransform; CGContextConcatCTM(context, frameTransform); CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image); CGColorSpaceRelease(rgbColorSpace); CGContextRelease(context); CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 
return pxbuffer; } @end 

I am new to iOS, and I came across code on Stack Overflow that displays a video made from images. I ran the code and it shows no errors, but I do not get the desired output. I think the error is in the last line of this function:

- (void)writeImageAsMovie:(UIImage *)image toPath:(NSString *)path size:(CGSize)size duration:(int)duration, that is, [videoWriter finishWriting].
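
For reference, a minimal sketch of how the result of the write could be checked, assuming the same videoWriter variable used in the code above; AVAssetWriter reports failures through its status and error properties:

// Diagnostic sketch: inspect the writer's state after finishing.
[videoWriter finishWriting];
if (videoWriter.status == AVAssetWriterStatusFailed) {
    // error describes why nothing was written (for example, an unusable output path).
    NSLog(@"Writing failed: %@", videoWriter.error);
} else if (videoWriter.status == AVAssetWriterStatusCompleted) {
    NSLog(@"Writing completed.");
}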

Please help me.

If you have an array of images and just want to play those images back like a video, you can use a UIImageView and update it with the images from the array in a for loop.

If you have the images in an array, like this:

 for (int i = 0; i < [array count]; i++) {
     imageview.image = (UIImage *)[array objectAtIndex:i];
 }
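
If the frames should actually appear one after another on screen, UIImageView's built-in frame animation is a common alternative, since a tight loop on the main thread only leaves the last image visible. A minimal sketch, assuming array holds UIImage objects and a total playback time of 6 seconds:

 // Let UIImageView cycle through the frames itself.
 imageview.animationImages = array;      // NSArray of UIImage objects
 imageview.animationDuration = 6.0;      // seconds for one full pass
 imageview.animationRepeatCount = 1;     // 0 would repeat indefinitely
 [imageview startAnimating];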