How can I capture an image from the iOS camera without any user interaction?

I need to capture a still image from the front-facing camera and store it in the Documents directory. I pieced the code together from other posts, but I wanted to share it here in case anyone else has a similar need.

Define a UIImage property and a strong AVCaptureSession property (the controller must keep the session alive, otherwise ARC releases it before any frame is delivered), and make sure your class adopts the AVCaptureVideoDataOutputSampleBufferDelegate protocol:

    #import <AVFoundation/AVFoundation.h>

    @interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>

    @property (nonatomic, strong) AVCaptureSession *session;
    @property (nonatomic, strong) UIImage *theImage;

    @end

In viewDidLoad, or wherever else is appropriate, add:

    [self captureImage];
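One thing to keep in mind: on current iOS versions the camera only works once the user has granted access (and from iOS 10 on, the app's Info.plist must also contain an NSCameraUsageDescription entry). A minimal sketch of requesting access first, using requestAccessForMediaType: (available since iOS 7):

    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        if (granted) {
            // The completion handler may run on an arbitrary thread,
            // so hop back to the main queue before starting capture.
            dispatch_async(dispatch_get_main_queue(), ^{
                [self captureImage];
            });
        } else {
            NSLog(@"Camera access was denied.");
        }
    }];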

Then implement the following methods:

    - (void)captureImage
    {
        self.session = [[AVCaptureSession alloc] init];
        self.session.sessionPreset = AVCaptureSessionPresetHigh;

        AVCaptureDevice *device = [self frontCamera];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
        if (!input) {
            // Handle the error appropriately.
            NSLog(@"No camera input: %@", error);
            return;
        }
        [self.session addInput:input];

        AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
        // Ask for BGRA frames so they match the bitmap context in imageFromSampleBuffer:.
        output.videoSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

        // Deliver frames on a background serial queue.
        dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
        [output setSampleBufferDelegate:self queue:queue];
        [self.session addOutput:output];

        // Leave the session running; it is stopped in the delegate callback
        // once the first frame has been captured.
        [self.session startRunning];
    }

    - (AVCaptureDevice *)frontCamera
    {
        // Note: devicesWithMediaType: is deprecated on iOS 10+,
        // where AVCaptureDeviceDiscoverySession should be used instead.
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if ([device position] == AVCaptureDevicePositionFront) {
                return device;
            }
        }
        return nil;
    }

    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
        // Only keep the first frame; the delegate queue is serial,
        // so this check is safe.
        if (self.theImage != nil) {
            return;
        }

        CGImageRef cgImage = [self imageFromSampleBuffer:sampleBuffer];
        self.theImage = [UIImage imageWithCGImage:cgImage];
        CGImageRelease(cgImage);

        NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
        NSString *documentsPath = [paths objectAtIndex:0];
        NSString *filePath = [documentsPath stringByAppendingPathComponent:@"image.png"];
        NSData *data = UIImagePNGRepresentation(self.theImage);
        [data writeToFile:filePath atomically:YES];

        // Stop the session from another queue; calling stopRunning
        // synchronously on the sample buffer queue can deadlock.
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.session stopRunning];
        });
    }

    - (CGImageRef)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
    {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        // 32BGRA buffers are non-planar, so use the plain base address;
        // CVPixelBufferGetBaseAddressOfPlane returns NULL for non-planar buffers.
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                     kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(context);
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        // The caller is responsible for calling CGImageRelease() on the result.
        return newImage;
    }
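A caveat on orientation: the video data output delivers frames in the sensor's native landscape orientation, so the saved PNG will appear rotated when captured from a portrait app. One option is to ask the connection to rotate the buffers for you after adding the output; a sketch, assuming the device supports it:

    AVCaptureConnection *videoConnection = [output connectionWithMediaType:AVMediaTypeVideo];
    if (videoConnection.isVideoOrientationSupported) {
        // Rotate delivered pixel buffers to portrait.
        videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;
    }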
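Finally, if you only ever need a single still rather than a stream of frames, AVCaptureStillImageOutput (deprecated in iOS 10 in favor of AVCapturePhotoOutput) is arguably a better fit than AVCaptureVideoDataOutput. A rough sketch reusing the same session and the frontCamera helper above; in practice you may need to wait briefly after startRunning for the connection to become active:

    AVCaptureStillImageOutput *stillOutput = [[AVCaptureStillImageOutput alloc] init];
    stillOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };
    [self.session addOutput:stillOutput];
    [self.session startRunning];

    AVCaptureConnection *connection = [stillOutput connectionWithMediaType:AVMediaTypeVideo];
    [stillOutput captureStillImageAsynchronouslyFromConnection:connection
                                             completionHandler:^(CMSampleBufferRef buffer, NSError *error) {
        if (buffer) {
            // JPEG data, ready to be written to the Documents directory as above.
            NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:buffer];
            NSString *documentsPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
            [jpegData writeToFile:[documentsPath stringByAppendingPathComponent:@"image.jpg"] atomically:YES];
        }
        [self.session stopRunning];
    }];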