Capture an image in the background?

I am trying to capture an image from the camera in the background, without loading the camera or a preview interface.

In my app, the photo is taken in the background with no preview screen, just the normal app screen, and is shown to the user later.

Can anyone point me in the right direction?

You have to use AVCaptureSession and AVCaptureDeviceInput.

Here is part of the code that may help you:

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

@interface MyViewController : UIViewController {
    AVCaptureStillImageOutput *_output;
    AVCaptureConnection *_videoConnection;
    bool _isCaptureSessionStarted;
}

@property (retain, nonatomic) AVCaptureDevice *frontalCamera;

- (void)takePhoto;

@end

Implementation:

@interface MyViewController ()
@end

@implementation MyViewController

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil {
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        _isCaptureSessionStarted = false;
    }
    return self;
}

- (void)viewDidLoad {
    [super viewDidLoad];
    // Find the front camera and observe its exposure / white balance adjustments
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (int i = 0; i < cameras.count; i++) {
        AVCaptureDevice *camera = [cameras objectAtIndex:i];
        if (camera.position == AVCaptureDevicePositionFront) {
            self.frontalCamera = camera;
            [self.frontalCamera addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:nil];
            [self.frontalCamera addObserver:self forKeyPath:@"adjustingWhiteBalance" options:NSKeyValueObservingOptionNew context:nil];
        }
    }
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    // Once exposure and white balance have settled, grab the still image
    if (!self.frontalCamera.adjustingExposure && !self.frontalCamera.adjustingWhiteBalance) {
        if (_isCaptureSessionStarted) {
            [self captureStillImage];
        }
    }
}

- (void)takePhoto {
    if (self.frontalCamera != nil) {
        // Add the camera to a capture session
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        NSError *error = nil;
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.frontalCamera error:&error];
        if (!error && [session canAddInput:input]) {
            [session addInput:input];

            // Still image output
            _output = [[AVCaptureStillImageOutput alloc] init];

            // Captured image settings
            [_output setOutputSettings:[[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]];

            if ([session canAddOutput:_output]) {
                [session addOutput:_output];

                // Find the connection that carries video
                _videoConnection = nil;
                for (AVCaptureConnection *connection in _output.connections) {
                    for (AVCaptureInputPort *port in [connection inputPorts]) {
                        if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                            _videoConnection = connection;
                            break;
                        }
                    }
                    if (_videoConnection) {
                        break;
                    }
                }

                if (_videoConnection) {
                    [session startRunning];
                    NSLock *lock = [[[NSLock alloc] init] autorelease];
                    [lock lock];
                    _isCaptureSessionStarted = true;
                    [lock unlock];
                }
            }
        } else {
            NSLog(@"%@", [error localizedDescription]);
        }
    }
}

- (void)captureStillImage {
    [_output captureStillImageAsynchronouslyFromConnection:_videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        NSLock *lock = [[[NSLock alloc] init] autorelease];
        [lock lock];
        _isCaptureSessionStarted = false;
        [lock unlock];
        if (imageDataSampleBuffer != NULL) {
            NSData *bitmap = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
            // You can get the image here via [[UIImage alloc] initWithData:bitmap]
        }
    }];
}

@end
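
As a rough usage sketch (not part of the original answer; the photoCaptured: method name is a hypothetical placeholder you would implement yourself), inside the completion handler above you could turn the JPEG data into a UIImage and hand it back to the main thread for later display:

// Sketch only: inside the completionHandler of captureStillImage, after bitmap is created.
// "photoCaptured:" is a hypothetical method of your own.
if (imageDataSampleBuffer != NULL) {
    NSData *bitmap = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
    UIImage *image = [[[UIImage alloc] initWithData:bitmap] autorelease];
    dispatch_async(dispatch_get_main_queue(), ^{
        [self photoCaptured:image]; // e.g. store the image and show it to the user later
    });
}

Also remember to stop the session and remove the KVO observers (for example in dealloc) once you no longer need them.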