didOutputSampleBuffer委托没有被调用

我的代码中的didOutputSampleBuffer函数没有被调用,我不知道为什么会发生这种情况。代码如下:

import UIKit
import AVFoundation
import Accelerate

/// Shows a live camera preview in `camView` and streams raw video frames to a
/// `VideoDelegate` via an `AVCaptureVideoDataOutput`.
class ViewController: UIViewController {

    var captureSession: AVCaptureSession?
    var dataOutput: AVCaptureVideoDataOutput?
    var customPreviewLayer: AVCaptureVideoPreviewLayer?

    // FIX: AVCaptureVideoDataOutput does NOT retain its sample-buffer delegate.
    // The original code created the delegate as a local variable inside
    // setupCameraSession(), so it was deallocated as soon as that method
    // returned and captureOutput(_:didOutputSampleBuffer:fromConnection:) was
    // never called. Keeping it in a stored property ties its lifetime to the
    // view controller.
    let videoDelegate = VideoDelegate()

    @IBOutlet weak var camView: UIView!

    override func viewWillAppear(animated: Bool) {
        // FIX: the original called super.viewDidAppear(animated) here by mistake.
        super.viewWillAppear(animated)
        // Session is configured in viewDidLoad (which runs first); start it here.
        captureSession?.startRunning()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        setupCameraSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    /// Builds the capture pipeline: session -> camera input -> preview layer
    /// -> video data output with a serial callback queue.
    func setupCameraSession() {
        // Session
        self.captureSession = AVCaptureSession()
        // FIX: pair commitConfiguration() with beginConfiguration().
        captureSession!.beginConfiguration()
        captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080

        // Capture device
        let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
        var deviceInput = AVCaptureDeviceInput()
        do {
            deviceInput = try AVCaptureDeviceInput(device: inputDevice)
        } catch let error as NSError {
            print(error)
        }
        if captureSession!.canAddInput(deviceInput) {
            captureSession!.addInput(deviceInput)
        }

        // Preview
        self.customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.customPreviewLayer!.frame = camView.bounds
        self.customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect
        self.customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait
        camView.layer.addSublayer(self.customPreviewLayer!)
        print("Cam layer added")

        // Output: NV12 (bi-planar 4:2:0 full range) so plane 0 is 8-bit luma.
        self.dataOutput = AVCaptureVideoDataOutput()
        self.dataOutput!.videoSettings = [
            String(kCVPixelBufferPixelFormatTypeKey): Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
        ]
        dataOutput!.alwaysDiscardsLateVideoFrames = true
        if captureSession!.canAddOutput(dataOutput) {
            captureSession!.addOutput(dataOutput)
        }
        captureSession!.commitConfiguration()

        let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL)
        // FIX: use the retained delegate, not a short-lived local.
        dataOutput!.setSampleBufferDelegate(videoDelegate, queue: queue)
    }

    /// Converts the luma plane of an incoming frame to a grayscale CGImage and
    /// displays it. NOTE: this method is only invoked if this object is set as
    /// the output's delegate; with `videoDelegate` installed above it is unused.
    func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) {
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, 0)
        // For iOS the luma is contained in the full plane 0 (8-bit).
        let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0)
        let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0)
        let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0)
        let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
        let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()!
        // FIX: a device-gray bitmap context has no alpha component; use .None
        // (the original .NoneSkipFirst is invalid for an 8-bit gray context).
        let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.None.rawValue)!
        let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)!
        // FIX: the original never unlocked the pixel buffer.
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)
        dispatch_sync(dispatch_get_main_queue(), { () -> Void in
            self.customPreviewLayer!.contents = dstImageFilter as AnyObject
        })
    }
}

这里是我的VideoDelegate代码:

 import Foundation import AVFoundation import UIKit // Video Delegate class VideoDelegate : NSObject, AVCaptureVideoDataOutputSampleBufferDelegate { func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!){ print("hihi") } func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!){ print("LOL") } } 

为什么我的委托没有被调用?如何解决?我已经在 Stack Overflow 上查过类似的问题,但找不到解决这个问题的方法,请帮忙。

我发现了我的错误所在!原因是委托对象必须在同一个视图控制器中创建并持有(即让视图控制器本身作为委托),否则委托对象会被提前释放。这里是修改后的代码:

 import UIKit import AVFoundation import Accelerate var customPreviewLayer: AVCaptureVideoPreviewLayer? class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { var captureSession: AVCaptureSession? var dataOutput: AVCaptureVideoDataOutput? //var customPreviewLayer: AVCaptureVideoPreviewLayer? @IBOutlet weak var camView: UIView! override func viewWillAppear(animated: Bool) { super.viewDidAppear(animated) //setupCameraSession() } override func viewDidLoad() { super.viewDidLoad() // Do any additional setup after loading the view, typically from a nib. //captureSession?.startRunning() setupCameraSession() self.captureSession?.startRunning() } override func didReceiveMemoryWarning() { super.didReceiveMemoryWarning() // Dispose of any resources that can be recreated. } func setupCameraSession() { // Session self.captureSession = AVCaptureSession() self.captureSession!.sessionPreset = AVCaptureSessionPreset1920x1080 // Capture device let inputDevice: AVCaptureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) var deviceInput = AVCaptureDeviceInput() // Device input //var deviceInput: AVCaptureDeviceInput? = AVCaptureDeviceInput.deviceInputWithDevice(inputDevice, error: error) do { deviceInput = try AVCaptureDeviceInput(device: inputDevice) } catch let error as NSError { // Handle errors print(error) } if self.captureSession!.canAddInput(deviceInput) { self.captureSession!.addInput(deviceInput) } // Preview customPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession) customPreviewLayer!.frame = camView.bounds customPreviewLayer?.videoGravity = AVLayerVideoGravityResizeAspect customPreviewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.Portrait self.camView.layer.addSublayer(customPreviewLayer!) 
print("Cam layer added") self.dataOutput = AVCaptureVideoDataOutput() self.dataOutput!.videoSettings = [ String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) ] self.dataOutput!.alwaysDiscardsLateVideoFrames = true if self.captureSession!.canAddOutput(dataOutput) { self.captureSession!.addOutput(dataOutput) } self.captureSession!.commitConfiguration() let queue: dispatch_queue_t = dispatch_queue_create("VideoQueue", DISPATCH_QUEUE_SERIAL) //let delegate = VideoDelegate() self.dataOutput!.setSampleBufferDelegate(self, queue: queue) } func captureOutput(captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBufferRef, fromConnection connection: AVCaptureConnection) { print("buffered") let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)! CVPixelBufferLockBaseAddress(imageBuffer, 0) let width: size_t = CVPixelBufferGetWidthOfPlane(imageBuffer, 0) let height: size_t = CVPixelBufferGetHeightOfPlane(imageBuffer, 0) let bytesPerRow: size_t = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0) let lumaBuffer: UnsafeMutablePointer = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0) let grayColorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceGray()! let context: CGContextRef = CGBitmapContextCreate(lumaBuffer, width, height, 8, bytesPerRow, grayColorSpace, CGImageAlphaInfo.PremultipliedLast.rawValue)!//problematic let dstImageFilter: CGImageRef = CGBitmapContextCreateImage(context)! dispatch_sync(dispatch_get_main_queue(), {() -> Void in customPreviewLayer!.contents = dstImageFilter as AnyObject }) } } 

didOutputSampleBuffer委托没有被调用,但我在我的设置相机代码中找不到任何问题。 代码在github: 演示代码

 #import "DMVideoCamera.h" #import <UIKit/UIKit.h> @interface DMVideoCamera()<AVCaptureVideoDataOutputSampleBufferDelegate> @property (nonatomic, strong) dispatch_queue_t captureQueue; @property (nonatomic, strong) AVCaptureDevice *device; @property (nonatomic, strong) AVCaptureVideoDataOutput *output; @property (nonatomic, strong) AVCaptureDeviceInput *input; @end @implementation DMVideoCamera - (instancetype)init { if (self = [super init]) { [self setupCamera]; } return self; } - (void)setupCamera { if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) { return; } if([self isAVCaptureActive]) { _captureQueue = dispatch_queue_create("com.dmall.ScanQueue", DISPATCH_QUEUE_SERIAL); NSError *error = nil; _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; _session = [[AVCaptureSession alloc] init]; [self configSessionPreset]; _output = [[AVCaptureVideoDataOutput alloc] init]; [_output setVideoSettings:@{ (NSString *)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] }]; [_output setAlwaysDiscardsLateVideoFrames:YES]; [_output setSampleBufferDelegate:self queue:_captureQueue]; _input = [AVCaptureDeviceInput deviceInputWithDevice:_device error:&error]; if ([_session canAddInput:_input]) { [_session addInput:_input]; } if ([_session canAddOutput:_output]) { [_session addOutput:_output]; } } else { [self showAccessAlert]; } } 

在viewController中:

- (void)viewDidLoad {
    [super viewDidLoad];
    // NOTE(review): THIS is why didOutputSampleBuffer is never called here.
    // `camera` is a local variable; under ARC the DMVideoCamera instance
    // (which is its own sample-buffer delegate) is released as soon as
    // viewDidLoad returns. Store it in a strong property of the view
    // controller (e.g. self.camera) so it outlives this method.
    DMVideoCamera *camera = [DMVideoCamera new];
    // The preview layer only retains the session, not the DMVideoCamera
    // wrapper, so the preview may still render while the delegate is gone.
    AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:camera.session];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:previewLayer];
    camera.zoomFactor = 1.6;
    [camera start];
}