AVCaptureSession addInput issue with iOS 8

I have an app that uses AVCaptureSession and worked fine on earlier iOS versions, but when I run it on iOS 8 it crashes sporadically on the device. The problem remains unresolved. The exception is raised at [session addInput:input];. Please advise how to fix this. Please review my code below; the error occurs at [session addInput:input];.

Printed error description: Error Domain=AVFoundationErrorDomain Code=-11852 "Cannot use Back Camera" UserInfo=0x17c076e0 {NSLocalizedDescription=Cannot use Back Camera, AVErrorDeviceKey=, NSLocalizedFailureReason=This app is not authorized to use Back Camera.}

#import "CameraViewController.h" #import "MAImagePickerControllerAdjustViewController.h" #import "PopupViewController.h" #import "MAImagePickerFinalViewController.h" @implementation CameraViewController @synthesize vImagePreview; @synthesize vImage; @synthesize stillImageOutput; @synthesize lFrameCount; @synthesize session; @synthesize device; @synthesize oneOff; @synthesize captureManager = _captureManager; @synthesize flashButton = _flashButton; @synthesize vImage1; @synthesize vImage2; @synthesize vImage3; @synthesize vImage4; @synthesize vImage5; @synthesize vImage6; ///////////////////////////////////////////////////////////////////// #pragma mark - UI Actions ///////////////////////////////////////////////////////////////////// -(IBAction) captureNow { AVCaptureConnection *videoConnection = nil; for (AVCaptureConnection *connection in stillImageOutput.connections) { for (AVCaptureInputPort *port in [connection inputPorts]) { if ([[port mediaType] isEqual:AVMediaTypeVideo] ) { videoConnection = connection; break; } } if (videoConnection) { break; } } NSLog(@"about to request a capture from: %@", stillImageOutput); [stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler: ^(CMSampleBufferRef imageSampleBuffer, NSError *error) { CFDictionaryRef exifAttachments = CMGetAttachment( imageSampleBuffer, kCGImagePropertyExifDictionary, NULL); if (exifAttachments) { // Do something with the attachments. NSLog(@"attachements: %@", exifAttachments); } else NSLog(@"no attachments"); NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer]; UIImage *image = [[UIImage alloc] initWithData:imageData]; NSUserDefaults *standardUserDefaults = [NSUserDefaults standardUserDefaults]; NSString *val1 = nil; if (standardUserDefaults) { val1 = [standardUserDefaults objectForKey:@"clickTypeTwo"]; } if([val1 isEqualToString:@"cameraType"]) { dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ [session stopRunning]; }); FinalViewController *finalView; if ([[UIScreen mainScreen] bounds].size.height == 568) finalView = [[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil]; else finalView =[[FinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil]; finalView.sourceImage = image; //finalView.imageFrameEdited = YES; CATransition* transition = [CATransition animation]; transition.duration = 0.4; transition.type = kCATransitionFade; transition.subtype = kCATransitionFromBottom; [self.navigationController.view.layer addAnimation:transition forKey:kCATransition]; [self.navigationController pushViewController:finalView animated:NO]; } else { [session stopRunning]; AdjustViewController *adjustViewController; if ([[UIScreen mainScreen] bounds].size.height == 568) adjustViewController = [[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil]; else adjustViewController =[[AdjustViewController alloc] initWithNibName:IS_IPAD()?@"AdjustViewController_iPad":@"AdjustViewController" bundle:nil]; adjustViewController.sourceImage = image; CATransition* transition = [CATransition animation]; transition.duration = 0.4; transition.type = kCATransitionFade; transition.subtype = kCATransitionFromBottom; [self.navigationController.view.layer addAnimation:transition forKey:kCATransition]; [self.navigationController 
pushViewController:adjustViewController animated:NO]; } }]; } -(void)cropImageViewControllerDidFinished:(UIImage *)image{ FinalViewController *finalView; if ([[UIScreen mainScreen] bounds].size.height == 568) finalView = [[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil]; else finalView =[[MAImagePickerFinalViewController alloc] initWithNibName:IS_IPAD()?@"FinalViewController_iPad":@"FinalViewController" bundle:nil]; finalView.sourceImage = image; //finalView.imageFrameEdited = YES; CATransition* transition = [CATransition animation]; transition.duration = 0.4; transition.type = kCATransitionFade; transition.subtype = kCATransitionFromBottom; [self.navigationController.view.layer addAnimation:transition forKey:kCATransition]; [self.navigationController pushViewController:finalView animated:NO]; } ///////////////////////////////////////////////////////////////////// #pragma mark - Video Frame Delegate ///////////////////////////////////////////////////////////////////// - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { //NSLog(@"got frame"); iFrameCount++; // Update Display // We are running the the context of the capture session. To update the UI in real time, We have to do this in the context of the main thread. NSString * frameCountString = [[NSString alloc] initWithFormat:@"%4.4d", iFrameCount]; [lFrameCount performSelectorOnMainThread: @selector(setText:) withObject:frameCountString waitUntilDone:YES]; //NSLog(@"frame count %d", iFrameCount); } - (IBAction)showLeftSideBar { //[self dismissModalViewControllerAnimated:YES]; if ([[SidebarViewController share] respondsToSelector:@selector(showSideBarControllerWithDirection:)]) { [[SidebarViewController share] showSideBarControllerWithDirection:SideBarShowDirectionLeft]; } } - (IBAction)showRightSideBar:(id)sender { } - (IBAction)flipCamera:(id)sender { AVCaptureDevicePosition desiredPosition; if (isUsingFrontFacingCamera) desiredPosition = AVCaptureDevicePositionBack; else desiredPosition = AVCaptureDevicePositionFront; for (AVCaptureDevice *d in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) { if ([d position] == desiredPosition) { [[self session] beginConfiguration]; AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:d error:nil]; for (AVCaptureInput *oldInput in [[self session] inputs]) { [[self session] removeInput:oldInput]; } [[self session] addInput:input]; [[self session] commitConfiguration]; break; } } isUsingFrontFacingCamera = !isUsingFrontFacingCamera; } BOOL isUsingFrontFacingCamera; ///////////////////////////////////////////////////////////////////// #pragma mark - Guts ///////////////////////////////////////////////////////////////////// - (void)didReceiveMemoryWarning { [super didReceiveMemoryWarning]; // Release any cached data, images, etc that aren't in use. } ///////////////////////////////////////////////////////////////////// #pragma mark - View lifecycle ///////////////////////////////////////////////////////////////////// - (void)viewDidLoad { [super viewDidLoad]; } - (void)viewDidUnload { [super viewDidUnload]; // Release any retained subviews of the main view. 
// eg self.myOutlet = nil; } - (void)viewWillAppear:(BOOL)animated { [super viewWillAppear:animated]; } - (void)viewDidAppear:(BOOL)animated { [super viewDidAppear:animated]; flashIsOn=YES; ///////////////////////////////////////////////////////////////////////////// // Create a preview layer that has a capture session attached to it. // Stick this preview layer into our UIView. ///////////////////////////////////////////////////////////////////////////// session = [[AVCaptureSession alloc] init]; session.sessionPreset = AVCaptureSessionPreset640x480; CALayer *viewLayer = self.vImagePreview.layer; NSLog(@"viewLayer = %@", viewLayer); // viewLayer.frame = CGRectMake(-70, 150, 480, 336); // UIGraphicsBeginImageContextWithOptions(CGSizeMake(400, 400), NO, 1); AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session]; CGRect bounds=vImagePreview.layer.bounds; captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; captureVideoPreviewLayer.bounds=bounds; captureVideoPreviewLayer.position=CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds)); captureVideoPreviewLayer.frame = self.vImagePreview.bounds; [self.vImagePreview.layer addSublayer:captureVideoPreviewLayer]; //[self addVideoInputFrontCamera:YES]; // set to YES for Front Camera, No for Back camera device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; NSError *error = nil; AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; if (!input) { // Handle the error appropriately. NSLog(@"ERROR: trying to open camera: %@", error); } [session addInput:input]; ///////////////////////////////////////////////////////////// // OUTPUT #1: Still Image ///////////////////////////////////////////////////////////// // Add an output object to our session so we can get a still image // We retain a handle to the still image output and use this when we capture an image. 
stillImageOutput = [[AVCaptureStillImageOutput alloc] init]; NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys: AVVideoCodecJPEG, AVVideoCodecKey, nil]; [stillImageOutput setOutputSettings:outputSettings]; [session addOutput:stillImageOutput]; ///////////////////////////////////////////////////////////// // OUTPUT #2: Video Frames ///////////////////////////////////////////////////////////// // Create Video Frame Outlet that will send each frame to our delegate AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init]; captureOutput.alwaysDiscardsLateVideoFrames = YES; //captureOutput.minFrameDuration = CMTimeMake(1, 3); // deprecated in IOS5 // We need to create a queue to funnel the frames to our delegate dispatch_queue_t queue; queue = dispatch_queue_create("cameraQueue", NULL); [captureOutput setSampleBufferDelegate:self queue:queue]; dispatch_release(queue); // Set the video output to store frame in BGRA (It is supposed to be faster) NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; // let's try some different keys, NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; [captureOutput setVideoSettings:videoSettings]; [session addOutput:captureOutput]; ///////////////////////////////////////////////////////////// // start the capture session [session startRunning]; ///////////////////////////////////////////////////////////////////////////// // initialize frame counter iFrameCount = 0; } - (void)viewWillDisappear:(BOOL)animated { [super viewWillDisappear:animated]; } - (void)viewDidDisappear:(BOOL)animated { [super viewDidDisappear:animated]; [session stopRunning]; } - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // Return YES for supported orientations if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { return (interfaceOrientation != UIInterfaceOrientationPortraitUpsideDown); } else { return YES; } } - (IBAction)cancelButton:(id)sender{ } - (IBAction)flashOn:(id)sender{ Class captureDeviceClass = NSClassFromString(@"AVCaptureDevice"); if (captureDeviceClass != nil) { if ([device hasTorch] && [device hasFlash]){ [device lockForConfiguration:nil]; if (flashIsOn) { [device setTorchMode:AVCaptureTorchModeOn]; [device setFlashMode:AVCaptureFlashModeOn]; oneOff.text=@"On"; [_flashButton setImage:[UIImage imageNamed:@"flash-on-button"]]; _flashButton.accessibilityLabel = @"Disable Camera Flash"; flashIsOn = NO; //define as a variable/property if you need to know status } else { [_flashButton setImage:[UIImage imageNamed:@"flash-off-button"]]; _flashButton.accessibilityLabel = @"Enable Camera Flash"; oneOff.text=@"Off"; [device setTorchMode:AVCaptureTorchModeOff]; [device setFlashMode:AVCaptureFlashModeOff]; flashIsOn = YES; } [device unlockForConfiguration]; } } } - (void)dealloc { [[self session] stopRunning]; [super dealloc]; } - (void)storeFlashSettingWithBool:(BOOL)flashSetting { [[NSUserDefaults standardUserDefaults] setBool:flashSetting forKey:kCameraFlashDefaultsKey]; [[NSUserDefaults standardUserDefaults] synchronize]; } @end 

Please check your device settings: go to Settings -> Privacy -> Camera -> find your app in the list -> switch it on.

Then run the app. It works. Cheers.
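
If you want to catch this state in code rather than sending users straight to Settings, you can check the camera's authorization status before configuring the session. A minimal sketch, assuming iOS 7 or later; setupCaptureSession is a hypothetical method standing in for your own session-setup code:

    // Check the camera privacy setting before touching the session.
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (status) {
        case AVAuthorizationStatusAuthorized:
            // The user already granted access; safe to call addInput:.
            [self setupCaptureSession]; // hypothetical setup method
            break;
        case AVAuthorizationStatusNotDetermined:
            // First launch: ask the user, then continue on the main thread.
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (granted) {
                        [self setupCaptureSession];
                    } else {
                        NSLog(@"Camera access declined by user");
                    }
                });
            }];
            break;
        case AVAuthorizationStatusDenied:
        case AVAuthorizationStatusRestricted:
            // Access was switched off in Settings > Privacy > Camera (or by parental controls).
            NSLog(@"Camera access denied or restricted");
            break;
    }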

Change your dealloc method:

    [self.captureSession removeInput:self.videoInput];
    [self.captureSession removeOutput:self.videoOutput];
    self.captureSession = nil;
    self.videoOutput = nil;
    self.videoInput = nil;
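
For context, a fuller teardown might look like the sketch below. It assumes manual retain/release (the question's dealloc calls [super dealloc]) and the same captureSession/videoInput/videoOutput property names as above:

    - (void)dealloc
    {
        // Stop the session before tearing it down so no frames are in flight.
        [self.captureSession stopRunning];

        [self.captureSession removeInput:self.videoInput];
        [self.captureSession removeOutput:self.videoOutput];
        self.captureSession = nil;
        self.videoOutput = nil;
        self.videoInput = nil;

        [super dealloc]; // manual retain/release only; omit under ARC
    }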

We ran into this problem today. Basically, from iOS 8.0.2 onwards, camera access requires the Camera entry under the privacy settings to be enabled, not the Photos/Camera Roll one; once it was enabled, the code worked.
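
Independently of the Settings switch, it is worth guarding the addInput: call itself: when access is denied, deviceInputWithDevice:error: returns nil, and adding a nil input to the session raises an exception. A defensive sketch of the question's input setup, using the same session and device variables:

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input) {
        // On iOS 8 this is where the "not authorized" error surfaces.
        NSLog(@"ERROR: trying to open camera: %@", error);
    } else if ([session canAddInput:input]) {
        [session addInput:input];
    } else {
        NSLog(@"Cannot add camera input to the session");
    }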

I saw the same error in my app today. I handle it with an alert that contains a Settings button shortcut to the app's privacy settings.

    do {
        let captureInput: AVCaptureDeviceInput = try AVCaptureDeviceInput(device: self.device)
        ...
    } catch let error as NSError {
        let alert = UIAlertController(title: error.localizedDescription,
                                      message: error.localizedFailureReason,
                                      preferredStyle: .Alert)
        let settingsAction = UIAlertAction(title: "Settings", style: .Default) { (action) in
            UIApplication.sharedApplication().openURL(NSURL(string: UIApplicationOpenSettingsURLString)!)
        }
        alert.addAction(settingsAction)
        self.presentViewController(alert, animated: true, completion: nil)
    }
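
For completeness, here is a sketch of the same alert in Objective-C, in case the rest of the project is not in Swift. It assumes an NSError *error from the failed input creation; UIAlertController and UIApplicationOpenSettingsURLString both require iOS 8:

    // Present an alert whose "Settings" action deep-links to this app's privacy settings.
    UIAlertController *alert = [UIAlertController alertControllerWithTitle:error.localizedDescription
                                                                   message:error.localizedFailureReason
                                                            preferredStyle:UIAlertControllerStyleAlert];
    UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:@"Settings"
                                                             style:UIAlertActionStyleDefault
                                                           handler:^(UIAlertAction *action) {
        [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString]];
    }];
    [alert addAction:settingsAction];
    [self presentViewController:alert animated:YES completion:nil];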