iOS 8 movie from both the back and front cameras

How can I record on iOS 8 (or even iOS 7, if possible) and switch between cameras while recording? I have seen apps that start recording from the front camera, then switch to the back camera, and when the movie is finished you can watch footage from both cameras in a single clip. I think the Glide app does this, and maybe Snapchat (not sure about the last one).

After a long search and a lot of effort, here is the code… Thanks for your suggestion, Matic, for pointing me down the right path. I ended up with something a bit better that avoids actually merging files at the end.

    #import <UIKit/UIKit.h>
    #import <AVFoundation/AVFoundation.h>
    #import <CoreGraphics/CoreGraphics.h>

    @interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
    {
        IBOutlet UIView *viewCanvasRecording;
        IBOutlet UIButton *btnRecord;

        AVCaptureDevice *frontCamera;   // A pointer to the front camera
        AVCaptureDevice *backCamera;    // A pointer to the back camera
        AVCaptureDeviceInput *captureFrontInput;
        AVCaptureDeviceInput *captureBackInput;

        __block AVAssetWriter *_assetWriter;
        __block AVAssetWriterInput *_videoWriterInput;
        __block AVAssetWriterInput *_audioWriterInput;
        __block AVCaptureVideoDataOutput *videoOutput;
        dispatch_queue_t _captureQueue;

        BOOL currentFrontCamera;
        BOOL isCapturingInput;
        NSURL *recordingFile;
    }

    @property (nonatomic, strong) AVCaptureSession *captureSession;
    @property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;

    @end

    // ===== now the m file
    //
    //  ViewController.m
    //  record
    //
    //  Created by Catalin on 31/05/15.
    //  Copyright (c) 2015 lamobratory.com. All rights reserved.
    //

    #import "ViewController.h"
    #import <AssetsLibrary/AssetsLibrary.h>
    #import <MediaPlayer/MediaPlayer.h>

    @implementation ViewController

    - (void)viewDidLoad
    {
        [super viewDidLoad];

        currentFrontCamera = NO;
        isCapturingInput = NO;
        _assetWriter = nil;

        [self findCamera:YES];  // init the front camera
        [self findCamera:NO];   // init the back camera
        [self performSelector:@selector(initCaptureWithCamera) withObject:nil afterDelay:1];
    }

    - (void)didReceiveMemoryWarning
    {
        [super didReceiveMemoryWarning];
    }

    - (IBAction)tapStartRecord:(id)sender
    {
        if ([[btnRecord titleForState:UIControlStateNormal] isEqualToString:@"START"]) {
            [btnRecord setTitle:@"STOP" forState:UIControlStateNormal];
            isCapturingInput = YES;
        }
        else if ([[btnRecord titleForState:UIControlStateNormal] isEqualToString:@"STOP"]) {
            isCapturingInput = NO;
            dispatch_async(_captureQueue, ^{
                [_assetWriter finishWritingWithCompletionHandler:^{
                    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                    [library writeVideoAtPathToSavedPhotosAlbum:recordingFile completionBlock:^(NSURL *assetURL, NSError *error) {
                        if (error) {
                            NSLog(@"assets library failed (%@)", error);
                        }
                        else {
                            NSLog(@"file saved to library");
                        }
                        [self.captureSession stopRunning];
                        _assetWriter = nil;
                        recordingFile = nil;
                    }];
                }];
            });
            [btnRecord setTitle:@"START" forState:UIControlStateNormal];
        }
    }

    - (IBAction)tapSwitchCamera:(id)sender
    {
        // switch inputs
        [self swipeCamera];
    }

    - (void)swipeCamera
    {
        currentFrontCamera = !currentFrontCamera;  // swap the active camera

        [self.captureSession beginConfiguration];
        [self.captureSession removeInput:captureBackInput];
        [self.captureSession removeInput:captureFrontInput];
        if (!currentFrontCamera)
            [self.captureSession addInput:captureBackInput];
        else
            [self.captureSession addInput:captureFrontInput];
        [self.captureSession commitConfiguration];
    }

    #pragma mark - Camera methods

    - (BOOL)findCamera:(BOOL)useFrontCamera
    {
        // 0. Make sure we initialize our camera pointer:
        AVCaptureDevice *m_camera = NULL;

        // 1. Get a list of available devices;
        //    specifying AVMediaTypeVideo will ensure we only get a list of cameras, no microphones
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];

        // 2. Iterate through the device array and if a device is a camera, check if it's the one we want:
        for (AVCaptureDevice *device in devices) {
            if (useFrontCamera && AVCaptureDevicePositionFront == [device position]) {
                // We asked for the front camera and got the front camera, now keep a pointer to it:
                m_camera = device;
            }
            else if (!useFrontCamera && AVCaptureDevicePositionBack == [device position]) {
                // We asked for the back camera and here it is:
                m_camera = device;
            }
        }

        // 3. Set a frame rate for the camera:
        if (NULL != m_camera) {
            // We first need to lock the camera, so no one else can mess with its configuration:
            if ([m_camera lockForConfiguration:NULL]) {
                // Set a minimum frame rate of 10 frames per second
                [m_camera setActiveVideoMinFrameDuration:CMTimeMake(1, 10)];
                // and a maximum of 30 frames per second
                [m_camera setActiveVideoMaxFrameDuration:CMTimeMake(1, 30)];
                [m_camera unlockForConfiguration];
            }
        }

        if (!useFrontCamera)
            backCamera = m_camera;
        else
            frontCamera = m_camera;

        // 4. If we've found the camera we want, return true
        return (NULL != m_camera);
    }

    - (void)setupWriter
    {
        NSError *error = nil;
        _assetWriter = [[AVAssetWriter alloc] initWithURL:recordingFile fileType:AVFileTypeQuickTimeMovie error:&error];

        NSDictionary *actual = videoOutput.videoSettings;
        int _cy = [[actual objectForKey:@"Height"] intValue];
        int _cx = [[actual objectForKey:@"Width"] intValue];

        NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                                  AVVideoCodecH264, AVVideoCodecKey,
                                  [NSNumber numberWithInt:_cx], AVVideoWidthKey,
                                  [NSNumber numberWithInt:_cy], AVVideoHeightKey,
                                  nil];
        _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
        _videoWriterInput.expectsMediaDataInRealTime = YES;
        // else it will shoot in landscape even though we hold our phone in portrait mode
        _videoWriterInput.transform = CGAffineTransformMakeRotation(M_PI/2);

        /*settings = [NSDictionary dictionaryWithObjectsAndKeys:
                      [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                      [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                      [NSNumber numberWithFloat:16000], AVSampleRateKey,
                      [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                      nil];*/
        _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:nil];
        _audioWriterInput.expectsMediaDataInRealTime = YES;

        // add the inputs to the writer
        if ([_assetWriter canAddInput:_videoWriterInput]) {
            NSLog(@"added output to video");
            [_assetWriter addInput:_videoWriterInput];
        }
        if ([_assetWriter canAddInput:_audioWriterInput]) {
            NSLog(@"added output to audio");
            [_assetWriter addInput:_audioWriterInput];
        }
    }

    - (void)initCaptureWithCamera
    {
        if (self.captureVideoPreviewLayer != nil)  // refresh the views
            [self.captureVideoPreviewLayer removeFromSuperlayer];

        // remove the old file at the path if it exists
        recordingFile = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/tmp_vid.mov", NSTemporaryDirectory()]];
        [[NSFileManager defaultManager] removeItemAtURL:recordingFile error:nil];

        // ========================= configure session
        self.captureSession = [[AVCaptureSession alloc] init];
        NSString *preset = nil;
        if (!preset) {
            preset = AVCaptureSessionPresetHigh;
        }
        self.captureSession.sessionPreset = preset;

        // ========================= input devices from camera and mic
        NSError *error = NULL;
        captureFrontInput = [AVCaptureDeviceInput deviceInputWithDevice:frontCamera error:&error];
        captureBackInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:&error];
        if (NULL != error)
            return;
        if ([self.captureSession canAddInput:captureBackInput]) {
            NSLog(@"added input from camera");
            [self.captureSession addInput:captureBackInput];
        }

        // audio input from the default mic
        AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        AVCaptureDeviceInput *micinput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
        if ([self.captureSession canAddInput:micinput]) {
            NSLog(@"added input from mic");
            [self.captureSession addInput:micinput];
        }

        // ========================= now the outputs: video and audio into the asset writer
        _captureQueue = dispatch_queue_create("com.myapp.capture", DISPATCH_QUEUE_SERIAL);
        videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        [videoOutput setSampleBufferDelegate:self queue:_captureQueue];
        [audioOutput setSampleBufferDelegate:self queue:_captureQueue];

        // NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
        // NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
        // NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
        //
        // [videoOutput setVideoSettings:videoSettings];

        if ([self.captureSession canAddOutput:videoOutput]) {
            [self.captureSession addOutput:videoOutput];
        }
        if ([self.captureSession canAddOutput:audioOutput]) {
            [self.captureSession addOutput:audioOutput];
        }

        // ======================================================================
        // add the preview layer to see what we film
        if (!self.captureVideoPreviewLayer)
            self.captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        // if you want to adjust the preview layer frame, do it here!
        self.captureVideoPreviewLayer.frame = viewCanvasRecording.bounds;
        self.captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [viewCanvasRecording.layer addSublayer:self.captureVideoPreviewLayer];

        [self.captureSession startRunning];
    }

    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        BOOL frameFromVideoCaptured = NO;
        @synchronized(self) {
            if (!isCapturingInput)  // we haven't started filming yet, just ignore the frames
                return;
            if (!_assetWriter) {
                [self setupWriter];
            }
            frameFromVideoCaptured = (captureOutput == videoOutput);
        }

        // pass the frame to the asset writer
        [self writeFrame:sampleBuffer isVideo:frameFromVideoCaptured];
    }

    - (BOOL)writeFrame:(CMSampleBufferRef)sampleBuffer isVideo:(BOOL)isVideo
    {
        if (CMSampleBufferDataIsReady(sampleBuffer)) {
            if (_assetWriter.status == AVAssetWriterStatusUnknown) {
                CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                [_assetWriter startWriting];
                [_assetWriter startSessionAtSourceTime:startTime];
            }
            if (_assetWriter.status == AVAssetWriterStatusFailed) {
                NSLog(@"writer error %@", _assetWriter.error.localizedDescription);
                return NO;
            }
            if (isVideo) {
                if (_videoWriterInput.readyForMoreMediaData) {
                    [_videoWriterInput appendSampleBuffer:sampleBuffer];
                    return YES;
                }
            }
            else {
                if (_audioWriterInput.readyForMoreMediaData) {
                    [_audioWriterInput appendSampleBuffer:sampleBuffer];
                    return YES;
                }
            }
        }
        return NO;
    }

    @end
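A note on why no merging step is needed: the AVAssetWriter is fed from the sample-buffer delegate callbacks rather than being tied to a particular camera, so swapping inputs inside beginConfiguration/commitConfiguration never interrupts the writer, and both cameras land in the single tmp_vid.mov.

The implementation imports MediaPlayer but the playback side isn't shown. Here is a minimal sketch for previewing the finished file, assuming you keep the URL around (the code above nils out recordingFile after saving to the library); the playRecordingAtURL: helper is hypothetical, not part of the original:

    // Hypothetical helper, not from the original answer: preview a finished
    // recording with the iOS 7/8-era MPMoviePlayerViewController.
    - (void)playRecordingAtURL:(NSURL *)url
    {
        MPMoviePlayerViewController *player = [[MPMoviePlayerViewController alloc] initWithContentURL:url];
        [self presentMoviePlayerViewControllerAnimated:player];
    }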

I am using a multi-file approach. When switching, I remove the input and output from the capture session, then attach a new output (AVCaptureMovieFileOutput) together with the correct input. Each successive AVCaptureMovieFileOutput records to a different file name than the one before, so I actually create a series of files. Note also that the AVCaptureMovieFileOutput must be a brand-new object each time; you may not reuse the previous one, because it most likely has not stopped writing yet. The inputs are reused.
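A minimal sketch of that swap, under some assumptions: the class, ivar and method names here (SegmentedRecorder, switchToInput:, segmentIndex, and so on) are illustrative, not from a real project, and a production version would probably wait for the old output's delegate callback before removing it from the session:

    #import <AVFoundation/AVFoundation.h>

    // Sketch of the multi-file swap described above; all names are illustrative.
    @interface SegmentedRecorder : NSObject <AVCaptureFileOutputRecordingDelegate>
    {
        AVCaptureSession *session;
        AVCaptureMovieFileOutput *movieOutput;
        AVCaptureDeviceInput *currentInput;
        NSUInteger segmentIndex;
    }
    - (void)switchToInput:(AVCaptureDeviceInput *)newInput;
    @end

    @implementation SegmentedRecorder

    - (void)switchToInput:(AVCaptureDeviceInput *)newInput
    {
        // Stop the old output and let it finish flushing its file on its own;
        // it may still be writing, which is why it can never be reused.
        [movieOutput stopRecording];

        [session beginConfiguration];
        [session removeOutput:movieOutput];
        [session removeInput:currentInput];
        [session addInput:newInput];
        currentInput = newInput;

        // Attach a brand-new AVCaptureMovieFileOutput for the next segment.
        movieOutput = [[AVCaptureMovieFileOutput alloc] init];
        [session addOutput:movieOutput];
        [session commitConfiguration];

        // Every segment records to a different file name, producing the
        // series of files that gets stitched together afterwards.
        NSString *name = [NSString stringWithFormat:@"segment_%lu.mov", (unsigned long)segmentIndex++];
        NSURL *segmentURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:name]];
        [movieOutput startRecordingToOutputFileURL:segmentURL recordingDelegate:self];
    }

    - (void)captureOutput:(AVCaptureFileOutput *)captureOutput
    didFinishRecordingToOutputFileURL:(NSURL *)outputFileURL
          fromConnections:(NSArray *)connections
                    error:(NSError *)error
    {
        // The segment at outputFileURL is now complete and safe to read.
    }

    @end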

After that, there are different ways to stitch the videos together, depending on how much work you need to put in.
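For the simple case, AVMutableComposition plus AVAssetExportSession is enough. A rough sketch, continuing the hypothetical SegmentedRecorder above (error handling omitted for brevity):

    // Sketch: append each recorded segment to one composition, then export
    // the whole thing as a single movie file.
    - (void)stitchSegments:(NSArray *)segmentURLs toURL:(NSURL *)outputURL
    {
        AVMutableComposition *composition = [AVMutableComposition composition];
        CMTime cursor = kCMTimeZero;

        for (NSURL *url in segmentURLs) {
            AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:nil];
            [composition insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                 ofAsset:asset
                                  atTime:cursor
                                   error:nil];
            cursor = CMTimeAdd(cursor, asset.duration);
        }

        AVAssetExportSession *export =
            [[AVAssetExportSession alloc] initWithAsset:composition
                                             presetName:AVAssetExportPresetHighestQuality];
        export.outputURL = outputURL;
        export.outputFileType = AVFileTypeQuickTimeMovie;
        [export exportAsynchronouslyWithCompletionHandler:^{
            NSLog(@"stitch finished with status %ld", (long)export.status);
        }];
    }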

So my basic components are AVCaptureSession, AVCaptureMovieFileOutput and AVCaptureDeviceInput. On top of that, an AVCaptureVideoPreviewLayer is attached so you can see what you are recording.
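Wiring those components together is a one-time setup much like the first answer's, just with AVCaptureMovieFileOutput in place of the data outputs. A condensed sketch, again using the hypothetical ivars from the swap example; previewView and the camera device are assumed to come from elsewhere, and UIKit must be imported for UIView:

    // Sketch: one-time setup of the basic components named above.
    - (void)setupSessionWithCamera:(AVCaptureDevice *)camera previewView:(UIView *)previewView
    {
        session = [[AVCaptureSession alloc] init];
        session.sessionPreset = AVCaptureSessionPresetHigh;

        currentInput = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
        if ([session canAddInput:currentInput])
            [session addInput:currentInput];

        movieOutput = [[AVCaptureMovieFileOutput alloc] init];
        if ([session canAddOutput:movieOutput])
            [session addOutput:movieOutput];

        // The preview layer shows what is currently being recorded.
        AVCaptureVideoPreviewLayer *preview = [AVCaptureVideoPreviewLayer layerWithSession:session];
        preview.frame = previewView.bounds;
        preview.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [previewView.layer addSublayer:preview];

        [session startRunning];
    }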