How to capture a UIImage from AVCaptureVideoPreviewLayer instead of using AVCapturePhotoOutput

I want to "stream" the preview layer to my server, but I only want specific frames to be sent. Basically, I want to take a snapshot of the AVCaptureVideoPreviewLayer, scale it down to 28x28, convert it into an array of intensity values, and send it to my socket layer, where my Python backend handles the rest.

The problem is that AVCapturePhotoOutput's capture function is painfully slow. I can't call it repeatedly. Not to mention it always plays the camera shutter sound, haha.

The other problem is that taking a snapshot of the AVCaptureVideoPreviewLayer has proven really difficult. Using UIGraphicsBeginImageContext almost always returns a blank/clear image.

Help a brother out, thanks!

Basically, instead of grabbing frames from the AVCaptureVideoPreviewLayer, you should use an AVCaptureVideoDataOutputSampleBufferDelegate. Here is an example:

import Foundation
import UIKit
import AVFoundation

protocol CaptureManagerDelegate: class {
    func processCapturedImage(image: UIImage)
}

class CaptureManager: NSObject {
    internal static let shared = CaptureManager()
    weak var delegate: CaptureManagerDelegate?
    var session: AVCaptureSession?

    override init() {
        super.init()
        session = AVCaptureSession()

        // Set up the camera input
        let device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        let input = try! AVCaptureDeviceInput(device: device)
        session?.addInput(input)

        // Set up the video data output that delivers BGRA sample buffers
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: kCVPixelFormatType_32BGRA]
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session?.addOutput(output)
    }

    func startSession() {
        session?.startRunning()
    }

    func stopSession() {
        session?.stopRunning()
    }

    // Convert a CMSampleBuffer into a UIImage via a CoreGraphics bitmap context
    func getImageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

        let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
        guard let context = CGContext(data: baseAddress,
                                      width: width,
                                      height: height,
                                      bitsPerComponent: 8,
                                      bytesPerRow: bytesPerRow,
                                      space: colorSpace,
                                      bitmapInfo: bitmapInfo.rawValue) else {
            return nil
        }
        guard let cgImage = context.makeImage() else {
            return nil
        }
        // Rotate to portrait; the camera delivers landscape-oriented buffers
        return UIImage(cgImage: cgImage, scale: 1, orientation: .right)
    }
}

extension CaptureManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        guard let outputImage = getImageFromSampleBuffer(sampleBuffer: sampleBuffer) else {
            return
        }
        delegate?.processCapturedImage(image: outputImage)
    }
}
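Since the question only wants to send specific frames, converting every sample buffer to a UIImage is wasted work. A minimal throttling sketch, replacing the delegate callback above: frameCount and frameInterval are hypothetical stored properties you would add to CaptureManager, not part of the answer's code.

// Hypothetical stored properties on CaptureManager (not in the original answer):
//     var frameCount = 0
//     let frameInterval = 30   // at ~30 fps this forwards roughly one frame per second

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    frameCount += 1
    // Convert and forward only every frameInterval-th frame; skip the rest cheaply
    guard frameCount % frameInterval == 0 else { return }
    guard let outputImage = getImageFromSampleBuffer(sampleBuffer: sampleBuffer) else { return }
    delegate?.processCapturedImage(image: outputImage)
}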

Update: to process the images, implement the processCapturedImage method of the CaptureManagerDelegate protocol in whichever class needs them, for example:

import UIKit

class ViewController: UIViewController {
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        CaptureManager.shared.startSession()
        CaptureManager.shared.delegate = self
    }
}

extension ViewController: CaptureManagerDelegate {
    func processCapturedImage(image: UIImage) {
        self.imageView.image = image
    }
}
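To get from the captured UIImage to the 28x28 intensity array described in the question, one option is to redraw the frame into a small grayscale bitmap and read its bytes. This is a minimal sketch, not part of the original answer; intensities(from:) is a hypothetical helper name.

import UIKit

// Hypothetical helper: downscale a frame to 28x28 grayscale and return the
// 784 pixel intensities (0...255). Not part of the original answer.
func intensities(from image: UIImage, side: Int = 28) -> [UInt8]? {
    guard let cgImage = image.cgImage else { return nil }
    var pixels = [UInt8](repeating: 0, count: side * side)
    let drew = pixels.withUnsafeMutableBytes { (buffer: UnsafeMutableRawBufferPointer) -> Bool in
        guard let context = CGContext(data: buffer.baseAddress,
                                      width: side,
                                      height: side,
                                      bitsPerComponent: 8,
                                      bytesPerRow: side,
                                      space: CGColorSpaceCreateDeviceGray(),
                                      bitmapInfo: CGImageAlphaInfo.none.rawValue) else { return false }
        // Drawing into the tiny grayscale context downsamples and desaturates in one step
        context.interpolationQuality = .medium
        context.draw(cgImage, in: CGRect(x: 0, y: 0, width: side, height: side))
        return true
    }
    return drew ? pixels : nil
}

You could call this from processCapturedImage and hand the resulting array to your socket layer.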

@ninjaproger's answer is great! Just adding this as a Swift 4 version of the answer for future reference.

import UIKit
import AVFoundation

var customPreviewLayer: AVCaptureVideoPreviewLayer?

class ViewController: UIViewController {
    @IBOutlet weak var imageView: UIImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        CaptureManager.shared.startSession()
        CaptureManager.shared.delegate = self
    }
}

extension ViewController: CaptureManagerDelegate {
    func processCapturedImage(image: UIImage) {
        self.imageView.image = image
    }
}

protocol CaptureManagerDelegate: class {
    func processCapturedImage(image: UIImage)
}

class CaptureManager: NSObject {
    internal static let shared = CaptureManager()
    weak var delegate: CaptureManagerDelegate?
    var session: AVCaptureSession?

    override init() {
        super.init()
        session = AVCaptureSession()

        // Set up the camera input
        let device = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back)
        let input = try! AVCaptureDeviceInput(device: device!)
        session?.addInput(input)

        // Set up the video data output that delivers BGRA sample buffers
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        session?.addOutput(output)
    }

    func startSession() {
        session?.startRunning()
    }

    func stopSession() {
        session?.stopRunning()
    }

    // Convert a CMSampleBuffer into a UIImage via a CoreGraphics bitmap context
    func getImageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

        let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer)
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
        guard let context = CGContext(data: baseAddress,
                                      width: width,
                                      height: height,
                                      bitsPerComponent: 8,
                                      bytesPerRow: bytesPerRow,
                                      space: colorSpace,
                                      bitmapInfo: bitmapInfo.rawValue) else {
            return nil
        }
        guard let cgImage = context.makeImage() else {
            return nil
        }
        // Rotate to portrait; the camera delivers landscape-oriented buffers
        return UIImage(cgImage: cgImage, scale: 1, orientation: .right)
    }
}

extension CaptureManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let outputImage = getImageFromSampleBuffer(sampleBuffer: sampleBuffer) else {
            return
        }
        delegate?.processCapturedImage(image: outputImage)
    }
}
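One more note, not from the original answers: both versions deliver sample buffers on DispatchQueue.main, which keeps the example simple but ties frame processing to the UI thread. A common variation is to use a dedicated serial queue for the camera callbacks and hop back to the main queue only for UIKit work. The snippet below shows only the two relevant changes; videoQueue is an illustrative name.

// In CaptureManager's init, deliver frames on a background serial queue (sketch):
let videoQueue = DispatchQueue(label: "capture.video.frames")
output.setSampleBufferDelegate(self, queue: videoQueue)

// In the delegate consumer, touch UIKit only on the main queue:
func processCapturedImage(image: UIImage) {
    DispatchQueue.main.async {
        self.imageView.image = image
    }
}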