Getting images from an AVPlayerLayer into a UIImageView buffer

I tried [playerLayer renderInContext:UIGraphicsGetCurrentContext()];, but it only renders a black background. So instead I want to grab the current player item as a continuous series of still images, so that it looks like the video is playing even though it is really just still frames streaming one after another. Is there any other option?
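For reference, the failing attempt looks roughly like this (a minimal sketch; playerLayer is an assumed AVPlayerLayer reference). AVPlayerLayer composites video in hardware, which is why renderInContext: captures only the black backing layer:

    // Hypothetical snapshot attempt -- this yields a black image because
    // AVPlayerLayer's video content is not drawn through Core Graphics.
    UIGraphicsBeginImageContextWithOptions(playerLayer.bounds.size, NO, 0.0);
    [playerLayer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *snapshot = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();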

Try this code to set up the reader:

    // Set up the reader
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:urlvalue options:nil];
    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            AVAssetTrack *videoTrack = nil;
            NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
            if ([tracks count] == 1) {
                videoTrack = [tracks objectAtIndex:0];
                NSError *error = nil;
                _movieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
                if (error)
                    NSLog(@"%@", error.localizedDescription);
                // Ask for 32BGRA so the raw bytes match the BGRA CGBitmapContext
                // created in readNextMovieFrame below.
                NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
                NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
                NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
                [_movieReader addOutput:[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                                                   outputSettings:videoSettings]];
                [_movieReader startReading];
            }
        });
    }];
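This snippet relies on a few instance variables that are not shown. A minimal sketch of the declarations, assuming a plain view controller (the class name is hypothetical; _movieReader, videoImage, and frameCount are the names used in the code):

    #import <AVFoundation/AVFoundation.h>

    @interface MyViewController : UIViewController {
        AVAssetReader *_movieReader; // created in the setup block above
        UIImage *videoImage;         // most recently decoded frame
        int frameCount;              // number of frames written so far
    }
    @end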

Then get the next movie frame:

    - (void)readNextMovieFrame {
        if (_movieReader.status == AVAssetReaderStatusReading) {
            AVAssetReaderTrackOutput *output = [_movieReader.outputs objectAtIndex:0];
            CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
            if (sampleBuffer) {
                CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

                // Lock the image buffer and read its geometry
                CVPixelBufferLockBaseAddress(imageBuffer, 0);
                uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
                size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
                size_t width = CVPixelBufferGetWidth(imageBuffer);
                size_t height = CVPixelBufferGetHeight(imageBuffer);

                // Create a CGImageRef from the CVImageBufferRef (BGRA layout)
                CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
                CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                                colorSpace,
                                                                kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
                CGImageRef newImage = CGBitmapContextCreateImage(newContext);
                CGContextRelease(newContext);
                CGColorSpaceRelease(colorSpace);

                // Unlock only after the bitmap context is done with baseAddress
                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

                // Rotate the frame so the video is displayed in the correct orientation
                UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
                CGImageRelease(newImage);
                UIGraphicsBeginImageContext(image.size);
                [image drawAtPoint:CGPointMake(0, 0)];
                videoImage = UIGraphicsGetImageFromCurrentImageContext();
                UIGraphicsEndImageContext();

                // Save each frame as a PNG in the Documents directory
                NSLog(@"readNextMovieFrame==%d", frameCount);
                NSString *filename = [NSString stringWithFormat:@"Documents/frame_%d.png", frameCount];
                NSString *pngPath = [NSHomeDirectory() stringByAppendingPathComponent:filename];
                [UIImagePNGRepresentation(videoImage) writeToFile:pngPath atomically:YES];
                frameCount++;

                CFRelease(sampleBuffer);
            }
        }
    }
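To get the "still images streaming like video" effect from the question, call readNextMovieFrame repeatedly and push each decoded frame into a UIImageView. A rough sketch using CADisplayLink (self.imageView and the display-link plumbing are assumptions, not part of the original answer; the display link fires at the screen refresh rate, so frame timing is only approximate):

    // Drive frame reads from the screen refresh; assumes self.imageView exists.
    - (void)startDisplayLink {
        CADisplayLink *link = [CADisplayLink displayLinkWithTarget:self
                                                          selector:@selector(displayLinkFired:)];
        [link addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
    }

    - (void)displayLinkFired:(CADisplayLink *)link {
        if (_movieReader.status == AVAssetReaderStatusReading) {
            [self readNextMovieFrame];         // decodes the next frame into videoImage
            self.imageView.image = videoImage; // show the latest frame
        } else {
            [link invalidate];                 // stop once reading completes or fails
        }
    }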