Objective-C: CIDetector is always nil no matter what I do

I'm trying to get a simple proof of concept working with Apple's face detection API. I've looked at several other examples, including Apple's SquareCam sample and this project: https://github.com/jeroentrappers/FaceDetectionPOC

Based on these, it seems I'm following the correct pattern for using the API, but I'm stuck. No matter what I do, the CIDetector for my face detector is always nil!

I would really appreciate any help, clues, hints, or suggestions!

    -(void)initCamera{
        session = [[AVCaptureSession alloc] init];

        AVCaptureDevice *device;
        /*
        if([self frontCameraAvailable]){
            device = [self frontCamera];
        }else{
            device = [self backCamera];
        }*/
        device = [self frontCamera];
        isUsingFrontFacingCamera = YES;

        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
        if(input && [session canAddInput:input]){
            [session addInput:input];
        }else{
            NSLog(@"Error %@", error); //make this Dlog...
        }

        videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        NSDictionary *rgbOutputSettings = [NSDictionary dictionaryWithObject:
                                           [NSNumber numberWithInt:kCMPixelFormat_32BGRA]
                                           forKey:(id)kCVPixelBufferPixelFormatTypeKey];
        [videoDataOutput setVideoSettings:rgbOutputSettings];
        [videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];

        videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
        [videoDataOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];

        [[videoDataOutput connectionWithMediaType:AVMediaTypeVideo] setEnabled:YES];
        if ([session canAddOutput:videoDataOutput]) {
            [session addOutput:videoDataOutput];
        }

        [self embedPreviewInView:self.theImageView];

        [session startRunning];
    }

    -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
        CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
        CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary *)attachments];

        if(attachments){
            CFRelease(attachments);
        }

        UIDeviceOrientation curDeviceOrientation = [[UIDevice currentDevice] orientation];

        NSDictionary *imageOptions = @{ CIDetectorImageOrientation : [self exifOrientation:curDeviceOrientation] };
        NSDictionary *detectorOptions = @{ CIDetectorAccuracy : CIDetectorAccuracyLow };

        CIDetector *faceDetector = [CIDetector detectorOfType:CIFeatureTypeFace context:nil options:detectorOptions];

        NSArray *faceFeatures = [faceDetector featuresInImage:ciImage options:imageOptions];

        if([faceFeatures count] > 0){
            NSLog(@"GOT a face!");
            NSLog(@"%@", faceFeatures);
        }

        dispatch_async(dispatch_get_main_queue(), ^(void) {
            //NSLog(@"updating main thread");
        });
    }
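For comparison, here is a minimal, standalone detector-creation sketch based on the Core Image documentation, which passes CIDetectorTypeFace to +detectorOfType:context:options:. The helper function names and the UIImage-based path are only illustrative (they are not part of my capture pipeline above), and this is just a sketch of what a working setup is supposed to look like:

    #import <CoreImage/CoreImage.h>
    #import <UIKit/UIKit.h>

    // Minimal sketch: create the detector once and reuse it.
    // CIDetectorTypeFace is the constant the Core Image docs use
    // with +detectorOfType:context:options:.
    static CIDetector *MakeFaceDetector(void)
    {
        NSDictionary *options = @{ CIDetectorAccuracy : CIDetectorAccuracyLow };
        return [CIDetector detectorOfType:CIDetectorTypeFace
                                  context:nil
                                  options:options];
    }

    // Hypothetical usage: run detection on a CIImage built from a UIImage,
    // just to confirm the detector itself comes back non-nil and finds faces.
    static NSArray *DetectFaces(UIImage *uiImage)
    {
        CIImage *ciImage = [CIImage imageWithCGImage:uiImage.CGImage];
        CIDetector *detector = MakeFaceDetector();
        NSAssert(detector != nil, @"Detector creation failed");
        return [detector featuresInImage:ciImage];
    }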
