Swift: 如何截取 AVPlayerLayer 的截图

如何截取 AVPlayerLayer 的截图?我尝试使用下面的代码,它可以很好地捕获整个视图。

/// Captures the portion of the window occupied by `view` and saves it to the
/// photo album.
///
/// NOTE(review): `drawHierarchy(in:afterScreenUpdates:)` cannot capture an
/// `AVPlayerLayer`'s video content on a real device — the player area renders
/// black. That is the symptom described in this question; see the
/// thumbnail-overlay workaround in the answer below.
func screenShotMethod() {
    let window = UIApplication.shared.delegate!.window!!

    // Capture the entire window into an image.
    // afterScreenUpdates: true flushes pending layout/rendering first.
    UIGraphicsBeginImageContextWithOptions(window.bounds.size, false, UIScreen.main.scale)
    window.drawHierarchy(in: window.bounds, afterScreenUpdates: true)
    let windowImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    // Re-draw shifted so only the region covered by `view` remains.
    // Use ...WithOptions here too: the original plain
    // UIGraphicsBeginImageContext rendered the crop at 1x and lost Retina detail.
    UIGraphicsBeginImageContextWithOptions(view.frame.size, false, UIScreen.main.scale)
    windowImage?.draw(at: CGPoint(x: -view.frame.origin.x, y: -view.frame.origin.y))
    let croppedImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()

    // Save the crop; bail out gracefully instead of force-unwrapping.
    guard let result = croppedImage else { return }
    UIImageWriteToSavedPhotosAlbum(result, nil, nil, nil)
}

但问题是,当我尝试在 iPhone 真机上运行相同的代码时,它返回一张黑色图像,我不知道哪里出错了。

任何建议都将非常有帮助!

前几天,我们也遇到了同样的问题:如果对屏幕上带有视频播放器的界面进行截图,在模拟器中截图看起来很好,但在真机上却是一片黑屏。

经过大量尝试均告失败后,我最终找到了一个补丁(不确定这是否是解决问题的正确方法)。但这个方案确实奏效了,我在真机和模拟器上都能成功获取截图。

以下是我用来解决问题的一种方法。

1 → 从视频中获取当前时间的单帧画面(已有公开方法可用于此)

2 – >在CALayer的地方使用此缩略图(将其添加到层次结构中)

3 – >一旦我们完成从内存中删除缩略图(从层次结构中删除)

以下是相同做法的演示示例(给出的解决方案是 Objective-C 的,尽管问题本身是用 Swift 提的)。

Objective-C 解决方案

  - (void)SnapShot { UIImage *capturedImage = [self getASnapShotWithAVLayer]; } - (UIImage *)getASnapShotWithAVLayer { //Add temporary thumbnail One UIImageView *temporaryViewForVideoOne = [[UIImageView alloc] initWithFrame:self.videoViewOne.bounds]; temporaryViewForVideoOne.contentMode = UIViewContentModeScaleAspectFill; UIImage *imageFromCurrentTimeForVideoOne = [self takeVideoSnapShot:_playerItem1]; int orientationFromVideoForVideoOne = [self getTheActualOrientationOfVideo:self.playerItem1]; if(orientationFromVideoForVideoOne == 0) { orientationFromVideoForVideoOne = 3; } else if (orientationFromVideoForVideoOne == 90) { orientationFromVideoForVideoOne = 0; } imageFromCurrentTimeForVideoOne = [UIImage imageWithCGImage:[imageFromCurrentTimeForVideoOne CGImage] scale:[imageFromCurrentTimeForVideoOne scale] orientation: orientationFromVideoForVideoOne]; UIImage *rotatedImageFromCurrentContextForVideoOne = [self normalizedImage:imageFromCurrentTimeForVideoOne]; temporaryViewForVideoOne.clipsToBounds = YES; temporaryViewForVideoOne.image = rotatedImageFromCurrentContextForVideoOne; [self.videoViewOne addSubview:temporaryViewForVideoOne]; CGSize imageSize = CGSizeZero; UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation]; if (UIInterfaceOrientationIsPortrait(orientation)) { imageSize = [UIScreen mainScreen].bounds.size; } else { imageSize = CGSizeMake([UIScreen mainScreen].bounds.size.height, [UIScreen mainScreen].bounds.size.width); } UIGraphicsBeginImageContextWithOptions(imageSize, NO, [[UIScreen mainScreen] scale]); CGContextRef context = UIGraphicsGetCurrentContext(); for (UIWindow *window in [[UIApplication sharedApplication] windows]) { CGContextSaveGState(context); CGContextTranslateCTM(context, window.center.x, window.center.y); CGContextConcatCTM(context, window.transform); CGContextTranslateCTM(context, -window.bounds.size.width * window.layer.anchorPoint.x, -window.bounds.size.height * window.layer.anchorPoint.y); 
if (orientation == UIInterfaceOrientationLandscapeLeft) { CGContextRotateCTM(context, M_PI_2); CGContextTranslateCTM(context, 0, -imageSize.width); } else if (orientation == UIInterfaceOrientationLandscapeRight) { CGContextRotateCTM(context, -M_PI_2); CGContextTranslateCTM(context, -imageSize.height, 0); } else if (orientation == UIInterfaceOrientationPortraitUpsideDown) { CGContextRotateCTM(context, M_PI); CGContextTranslateCTM(context, -imageSize.width, -imageSize.height); } if (![window respondsToSelector:@selector(drawViewHierarchyInRect:afterScreenUpdates:)]) { [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:YES]; } else { [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:YES]; } CGContextRestoreGState(context); } UIImage *image = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); [temporaryViewForVideoOne removeFromSuperview]; [temporaryViewForVideoTwo removeFromSuperview]; return image; } -(UIImage *)takeVideoSnapShot: (AVPlayerItem *) playerItem{ AVURLAsset *asset = (AVURLAsset *) playerItem.asset; AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:asset]; imageGenerator.requestedTimeToleranceAfter = kCMTimeZero; imageGenerator.requestedTimeToleranceBefore = kCMTimeZero; CGImageRef thumb = [imageGenerator copyCGImageAtTime:playerItem.currentTime actualTime:NULL error:NULL]; UIImage *videoImage = [UIImage imageWithCGImage:thumb]; CGImageRelease(thumb); return videoImage; } -(int)getTheActualOrientationOfVideo:(AVPlayerItem *)playerItem { AVAsset *asset = playerItem.asset; NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo]; AVAssetTrack *track = [tracks objectAtIndex:0]; CGAffineTransform videoAssetOrientation_ = [track preferredTransform]; CGFloat videoAngle = RadiansToDegrees(atan2(videoAssetOrientation_.b, videoAssetOrientation_.a)); int orientation = 0; switch ((int)videoAngle) { case 0: orientation = UIImageOrientationRight; break; case 90: orientation = 
UIImageOrientationUp; break; case 180: orientation = UIImageOrientationLeft; break; case -90: orientation = UIImageOrientationDown; break; default: //Not found break; } return orientation; } - (UIImage *)normalizedImage:(UIImage *)imageOf { if (imageOf.imageOrientation == UIImageOrientationUp) return imageOf; UIGraphicsBeginImageContextWithOptions(imageOf.size, NO, imageOf.scale); [imageOf drawInRect:(CGRect){0, 0, imageOf.size}]; UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); return normalizedImage; } 

Swift 解决方案

 func snapShot() { let capturedImage: UIImage? = getASnapShotWithAVLayer() } func getASnapShotWithAVLayer() -> UIImage { //Add temporary thumbnail One let temporaryViewForVideoOne = UIImageView(frame: videoViewOne.bounds) //replace videoViewOne with you view which is showing AVPlayerContent temporaryViewForVideoOne.contentMode = .scaleAspectFill var imageFromCurrentTimeForVideoOne: UIImage? = takeVideoSnapShot(playerItem1) var orientationFromVideoForVideoOne: Int = getTheActualOrientationOfVideo(playerItem1) if orientationFromVideoForVideoOne == 0 { orientationFromVideoForVideoOne = 3 } else if orientationFromVideoForVideoOne == 90 { orientationFromVideoForVideoOne = 0 } imageFromCurrentTimeForVideoOne = UIImage(cgImage: imageFromCurrentTimeForVideoOne?.cgImage, scale: imageFromCurrentTimeForVideoOne?.scale, orientation: orientationFromVideoForVideoOne) let rotatedImageFromCurrentContextForVideoOne: UIImage? = normalizedImage(imageFromCurrentTimeForVideoOne) temporaryViewForVideoOne.clipsToBounds = true temporaryViewForVideoOne.image = rotatedImageFromCurrentContextForVideoOne videoViewOne.addSubview(temporaryViewForVideoOne) //Replace videoViewOne with your view containing AVPlayer var imageSize = CGSize.zero let orientation: UIInterfaceOrientation = UIApplication.shared.statusBarOrientation if UIInterfaceOrientationIsPortrait(orientation) { imageSize = UIScreen.main.bounds.size } else { imageSize = CGSize(width: CGFloat(UIScreen.main.bounds.size.height), height: CGFloat(UIScreen.main.bounds.size.width)) } UIGraphicsBeginImageContextWithOptions(imageSize, false, UIScreen.main.scale()) let context: CGContext? 
= UIGraphicsGetCurrentContext() for window: UIWindow in UIApplication.shared.windows { context.saveGState() context.translateBy(x: window.center.x, y: window.center.y) context.concatenate(window.transform) context.translateBy(x: -window.bounds.size.width * window.layer.anchorPoint.x, y: -window.bounds.size.height * window.layer.anchorPoint.y) if orientation == .landscapeLeft { context.rotate(by: M_PI_2) context.translateBy(x: 0, y: -imageSize.width) } else if orientation == .landscapeRight { context.rotate(by: -M_PI_2) context.translateBy(x: -imageSize.height, y: 0) } else if orientation == .portraitUpsideDown { context.rotate(by: .pi) context.translateBy(x: -imageSize.width, y: -imageSize.height) } if !window.responds(to: Selector("drawViewHierarchyInRect:afterScreenUpdates:")) { window.drawHierarchy(in: window.bounds, afterScreenUpdates: true) } else { window.drawHierarchy(in: window.bounds, afterScreenUpdates: true) } context.restoreGState() } let image: UIImage? = UIGraphicsGetImageFromCurrentImageContext() UIGraphicsEndImageContext() temporaryViewForVideoOne.removeFromSuperview() return image! } func takeVideoSnapShot(_ playerItem: AVPlayerItem) -> UIImage { let asset: AVURLAsset? = (playerItem.asset as? AVURLAsset) let imageGenerator = AVAssetImageGenerator(asset) imageGenerator.requestedTimeToleranceAfter = kCMTimeZero imageGenerator.requestedTimeToleranceBefore = kCMTimeZero let thumb: CGImageRef? = try? imageGenerator.copyCGImage(atTime: playerItem.currentTime(), actualTime: nil) let videoImage = UIImage(cgImage: thumb) CGImageRelease(thumb) return videoImage } func getTheActualOrientationOfVideo(_ playerItem: AVPlayerItem) -> Int { let asset: AVAsset? = playerItem.asset let tracks: [Any]? = asset?.tracks(withMediaType: AVMediaTypeVideo) let track: AVAssetTrack? = (tracks?[0] as? AVAssetTrack) let videoAssetOrientation_: CGAffineTransform? = track?.preferredTransform let videoAngle: CGFloat? 
= RadiansToDegrees(atan2(videoAssetOrientation_?.b, videoAssetOrientation_?.a)) var orientation: Int = 0 switch Int(videoAngle) { case 0: orientation = .right case 90: orientation = .up case 180: orientation = .left case -90: orientation = .down default: //Not found } return orientation } func normalizedImage(_ imageOf: UIImage) -> UIImage { if imageOf.imageOrientation == .up { return imageOf } UIGraphicsBeginImageContextWithOptions(imageOf.size, false, imageOf.scale) imageOf.draw(in: (CGRect)) let normalizedImage: UIImage? = UIGraphicsGetImageFromCurrentImageContext() UIGraphicsEndImageContext() return normalizedImage! }