Screenshot fails during video preview

I'm trying to capture the screen while running a video preview with AVFoundation (AVCaptureDeviceInput and AVCaptureVideoDataOutput).

Starting the preview:

func startCamera() {
    var screenSize = UIScreen.mainScreen().bounds.size;
    self.previewView = UIView(frame: CGRectMake(0, 0, UIScreen.mainScreen().bounds.size.width, UIScreen.mainScreen().bounds.size.height));
    self.previewView.contentMode = UIViewContentMode.ScaleAspectFit
    self.view.addSubview(previewView);

    session.sessionPreset = AVCaptureSessionPresetHigh
    let devices = AVCaptureDevice.devices();
    // Loop through all the capture devices on this phone
    for device in devices {
        // Make sure this particular device supports video
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Finally check the position and confirm we've got the back camera
            if (device.position == AVCaptureDevicePosition.Back) {
                captureDevice = device as? AVCaptureDevice;
                if captureDevice != nil {
                    beginSession();
                    break;
                }
            }
        }
    }
}

func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice!, error: &err);
    if err != nil {
        println("error: \(err?.localizedDescription)");
    }
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput);
    }

    videoDataOutput = AVCaptureVideoDataOutput()
    if let videoDataOutput = videoDataOutput {
        var rgbOutputSettings = [NSNumber(integer: kCMPixelFormat_32BGRA): kCVPixelBufferPixelFormatTypeKey]
        videoDataOutput.alwaysDiscardsLateVideoFrames = true;
        videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(self.videoDataOutput) {
            session.addOutput(self.videoDataOutput)
        }
        videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight

            var rootLayer: CALayer = self.previewView.layer;
            rootLayer.masksToBounds = true;
            previewLayer.frame = rootLayer.bounds;
            rootLayer.addSublayer(self.previewLayer);
            session.startRunning();

            delay(8, closure: { () -> () in
                self.processImage()
            })
        }
    }
}
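The delay(_:closure:) helper called at the end of beginSession() isn't shown in the question. A minimal sketch, assuming it simply wraps dispatch_after on the main queue (same pre-Swift-3 dialect as the code above), would be:

import Foundation

// Hypothetical stand-in for the `delay` helper used above: run a closure on the
// main queue after the given number of seconds via dispatch_after.
func delay(seconds: Double, closure: () -> ()) {
    let when = dispatch_time(DISPATCH_TIME_NOW, Int64(seconds * Double(NSEC_PER_SEC)))
    dispatch_after(when, dispatch_get_main_queue(), closure)
}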

The code that captures the screen:

func processImage() {
    UIGraphicsBeginImageContextWithOptions(view.bounds.size, false, 0)
    previewLayer!.renderInContext(UIGraphicsGetCurrentContext())
    // tried previewView!.layer.render... to no avail
    let previewImage = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    savePDFImage(previewImage, name: "front.pdf")
}

The returned image is all white. How can I grab a screenshot of what is on screen while the video preview is running?

Don't capture the screen. Instead, capture a frame from the buffer and use it. AVCaptureVideoPreviewLayer displays its video through a hardware-composited path that renderInContext(_:) cannot read, which is why your snapshot comes back blank.

Implement AVCaptureVideoDataOutputSampleBufferDelegate.
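This step assumes the object that owns the session adopts the delegate protocol; a minimal sketch (the class name here is a placeholder, not from the original code) looks like this:

import UIKit
import AVFoundation

// Whatever owns the capture session (a view controller in this sketch) declares
// the delegate protocol so it can receive sample buffers from the data output.
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    // beginSession(), captureOutput(_:didOutputSampleBuffer:fromConnection:) and the
    // session/output properties from the code below live here.
}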

On the video data output, call setSampleBufferDelegate.

Implement the captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) method.

When you store an image to the device, play the shutter sound yourself.
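A video data output does not play a shutter sound for you, so the code below triggers one manually; 1108 is the system camera-shutter sound used in that code, wrapped here in a tiny helper for illustration:

import AudioToolbox

// Plays the standard camera shutter click (system sound ID 1108); call it when a
// frame has been kept, as the captureOutput implementation below does.
func playShutterSound() {
    AudioServicesPlaySystemSound(1108)
}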

In the end, your code will look more like this:

var videoDataOutput: AVCaptureVideoDataOutput?
var videoDataOutputQueue: dispatch_queue_t = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)
var stillImageOutput: AVCaptureStillImageOutput?
var previewLayer: AVCaptureVideoPreviewLayer?
var captureDevice: AVCaptureDevice?
let session = AVCaptureSession()

func beginSession() {
    var err: NSError? = nil
    var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput(device: captureDevice!, error: &err)
    if err != nil {
        println("error: \(err?.localizedDescription)")
    }
    if session.canAddInput(deviceInput) {
        session.addInput(deviceInput)
    }

    stillImageOutput = AVCaptureStillImageOutput()
    videoDataOutput = AVCaptureVideoDataOutput()
    if let videoDataOutput = videoDataOutput, stillImageOutput = stillImageOutput {
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)
        if session.canAddOutput(videoDataOutput) {
            session.addOutput(videoDataOutput)
        }

        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
        if session.canAddOutput(stillImageOutput) {
            session.addOutput(stillImageOutput)
        }

        videoDataOutput.connectionWithMediaType(AVMediaTypeVideo).enabled = true

        if let previewLayer = AVCaptureVideoPreviewLayer(session: self.session) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspect
            previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight

            var rootLayer: CALayer = self.previewView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)
            session.startRunning()
        }
    }
}

// this gets called periodically with an image
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    if let image = CheckResponse.imageFromSampleBuffer(sampleBuffer) {
        if keepImage(image) {
            AudioServicesPlaySystemSound(1108)
            session.stopRunning()
        }
    }
}

// This is in the Objective-C CheckResponse class to get an image from the buffer:
+ (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVPixelBufferRef pb = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciimg = [CIImage imageWithCVPixelBuffer:pb];

    // show result
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef ref = [context createCGImage:ciimg fromRect:ciimg.extent];
    UIImage *image = [UIImage imageWithCGImage:ref scale:1.0 orientation:(UIImageOrientationUp)];
    CFRelease(ref);
    return image;
}
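If you would rather avoid the Objective-C helper, a rough Swift equivalent of imageFromSampleBuffer could look like the sketch below (same pre-Swift-3 dialect as the rest of the answer; treat it as an illustration, not a drop-in replacement for CheckResponse):

import UIKit
import AVFoundation
import CoreImage

// Sketch: convert a CMSampleBuffer from the video data output into a UIImage by
// going CVPixelBuffer -> CIImage -> CGImage -> UIImage.
func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
        let ciImage = CIImage(CVPixelBuffer: pixelBuffer)
        let context = CIContext(options: nil)
        let cgImage = context.createCGImage(ciImage, fromRect: ciImage.extent)
        return UIImage(CGImage: cgImage, scale: 1.0, orientation: .Up)
    }
    return nil
}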