AVAssetWriter not working with audio

I am trying to get audio to work with video in an iOS app. The video is fine, but no audio is recorded to the file (my iPhone speaker works).

Here is the setup from init:

    session = [[AVCaptureSession alloc] init];
    menu->session = session;
    menu_open = NO;
    session.sessionPreset = AVCaptureSessionPresetMedium;

    camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    menu->camera = camera;

    // Configure the camera: continuous exposure/focus/white balance, torch on.
    [session beginConfiguration];
    [camera lockForConfiguration:nil];
    if ([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    if ([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
        camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
    }
    if ([camera hasTorch]) {
        if ([camera isTorchModeSupported:AVCaptureTorchModeOn]) {
            [camera setTorchMode:AVCaptureTorchModeOn];
        }
    }
    [camera unlockForConfiguration];
    [session commitConfiguration];

    // The camera input is added now; the microphone input is created here but
    // only added to the session when recording starts (see below).
    AVCaptureDeviceInput *camera_input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    [session addInput:camera_input];
    microphone_input = [[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil] retain];

    AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [session addOutput:output];
    output.minFrameDuration = CMTimeMake(1, 30);

    dispatch_queue_t queue = dispatch_queue_create("MY QUEUE", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    // Audio frames go to a separate delegate object on its own queue.
    audio_output = [[[AVCaptureAudioDataOutput alloc] init] retain];
    queue = dispatch_queue_create("MY QUEUE", NULL);
    AudioOutputBufferDelegate *special_delegate = [[[AudioOutputBufferDelegate alloc] init] autorelease];
    special_delegate->normal_delegate = self;
    [special_delegate retain];
    [audio_output setSampleBufferDelegate:special_delegate queue:queue];
    dispatch_release(queue);

    [session startRunning];
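One thing worth verifying in a setup like this (my suggestion, not part of the original post): AVCaptureSession can refuse an input or output, so guarding the add calls with canAddInput:/canAddOutput: makes a missing audio connection visible instead of failing later. A minimal sketch using the question's own variables:

    // Hypothetical guard around the add calls; logs if the session
    // rejects the microphone instead of leaving the failure invisible.
    if ([session canAddInput:microphone_input]) {
        [session addInput:microphone_input];
    } else {
        NSLog(@"Session refused the microphone input");
    }
    if ([session canAddOutput:audio_output]) {
        [session addOutput:audio_output];
    } else {
        NSLog(@"Session refused the audio data output");
    }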

Here is the start and stop of recording:

    if (recording) { // Hence stop recording
        [video_button setTitle:@"Video" forState:UIControlStateNormal];
        recording = NO;
        [writer_input markAsFinished];
        [audio_writer_input markAsFinished];
        [video_writer endSessionAtSourceTime:CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:start_time], 30)];
        [video_writer finishWriting];
        UISaveVideoAtPathToSavedPhotosAlbum(temp_url, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
        [start_time release];
        [temp_url release];
        [av_adaptor release];

        // Detach the microphone while not recording.
        [microphone lockForConfiguration:nil];
        [session beginConfiguration];
        [session removeInput:microphone_input];
        [session removeOutput:audio_output];
        [session commitConfiguration];
        [microphone unlockForConfiguration];
        [menu restateConfigiration];
        [vid_off play];
    } else { // Start recording
        [vid_on play];

        // Attach the microphone for the duration of the recording.
        [microphone lockForConfiguration:nil];
        [session beginConfiguration];
        [session addInput:microphone_input];
        [session addOutput:audio_output];
        [session commitConfiguration];
        [microphone unlockForConfiguration];
        [menu restateConfigiration];

        [video_button setTitle:@"Stop" forState:UIControlStateNormal];
        recording = YES;

        NSError *error = nil;
        NSFileManager *file_manager = [[NSFileManager alloc] init];
        temp_url = [[NSString alloc] initWithFormat:@"%@/%@", NSTemporaryDirectory(), @"temp.mp4"];
        [file_manager removeItemAtPath:temp_url error:NULL];
        [file_manager release];

        video_writer = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:temp_url]
                                                 fileType:AVFileTypeMPEG4
                                                    error:&error];

        NSDictionary *video_settings = [NSDictionary dictionaryWithObjectsAndKeys:
                                        AVVideoCodecH264, AVVideoCodecKey,
                                        [NSNumber numberWithInt:360], AVVideoWidthKey,
                                        [NSNumber numberWithInt:480], AVVideoHeightKey, nil];
        writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                           outputSettings:video_settings] retain];

        // Mono AAC at 44.1 kHz / 64 kbps for the audio track.
        AudioChannelLayout acl;
        bzero(&acl, sizeof(acl));
        acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
        audio_writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                 outputSettings:[NSDictionary dictionaryWithObjectsAndKeys:
                                                                                 [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                                                                 [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                                                                 [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                                                                 [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                                                                                 [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey, nil]] retain];
        audio_writer_input.expectsMediaDataInRealTime = YES;

        av_adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writer_input
                                                                                       sourcePixelBufferAttributes:NULL] retain];

        [video_writer addInput:writer_input];
        [video_writer addInput:audio_writer_input];
        [video_writer startWriting];
        [video_writer startSessionAtSourceTime:CMTimeMake(0, 1)];
        start_time = [[NSDate alloc] init];
    }
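As a diagnostic aside (not in the original code): when an AVAssetWriter rejects samples, the reason usually only surfaces in its error property. A small check that could run right after finishWriting:

    // Hypothetical post-mortem check after [video_writer finishWriting];
    // AVAssetWriter exposes the underlying failure via -status and -error.
    if (video_writer.status == AVAssetWriterStatusFailed) {
        NSLog(@"Writer failed: %@", video_writer.error);
    }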

Here is the audio delegate:

    @implementation AudioOutputBufferDelegate

    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
        if (normal_delegate->recording) {
            // Restamp the buffer with wall-clock time since recording began.
            CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer,
                CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:normal_delegate->start_time], 30));
            [normal_delegate->audio_writer_input appendSampleBuffer:sampleBuffer];
        }
    }

    @end
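A side note on this delegate (an assumption of mine, not from the post): appendSampleBuffer: returns NO on failure and should only be called while the input reports readyForMoreMediaData, so a guarded version of the body might look like the sketch below (leaving the timestamp handling aside, which the answer discusses):

    // Hedged variant of the delegate body: respect readyForMoreMediaData
    // and surface append failures instead of dropping them silently.
    if (normal_delegate->recording) {
        AVAssetWriterInput *input = normal_delegate->audio_writer_input;
        if (input.readyForMoreMediaData) {
            if (![input appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Audio append failed");
            }
        }
    }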

The video methods don't matter here, because they work. "restateConfigiration" just tidies up the session configuration; otherwise the torch turns off, etc.:

    [session beginConfiguration];
    switch (quality) {
        case Low:
            session.sessionPreset = AVCaptureSessionPresetLow;
            break;
        case Medium:
            session.sessionPreset = AVCaptureSessionPreset640x480;
            break;
    }
    [session commitConfiguration];

    [camera lockForConfiguration:nil];
    if ([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    if ([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
        camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
    }
    if ([camera hasTorch]) {
        if (torch) {
            if ([camera isTorchModeSupported:AVCaptureTorchModeOn]) {
                [camera setTorchMode:AVCaptureTorchModeOn];
            }
        } else {
            if ([camera isTorchModeSupported:AVCaptureTorchModeOff]) {
                [camera setTorchMode:AVCaptureTorchModeOff];
            }
        }
    }
    [camera unlockForConfiguration];

Thanks for any help.

AVAssetWriter and audio

This may be the same issue as the one mentioned in the linked post. Try commenting out these lines:

    [writer_input markAsFinished];
    [audio_writer_input markAsFinished];
    [video_writer endSessionAtSourceTime:CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:start_time], 30)];
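With those calls removed, the stop branch would reduce to something like the following sketch (same variables as the question; my reconstruction, not tested code). finishWriting marks the inputs finished itself, so AVFoundation closes out the session timestamps on its own:

    // Stop path with the suspect calls removed: just finish the writer
    // and hand the file off to the photo album.
    recording = NO;
    [video_writer finishWriting];
    UISaveVideoAtPathToSavedPhotosAlbum(temp_url, self,
        @selector(video:didFinishSavingWithError:contextInfo:), nil);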

Edit

I don't know whether the way you set the presentation timestamp is necessarily wrong. The way I handle it is to set a variable to zero at the start; then, when my delegate receives the first packet:

    if (_startTime.value == 0) {
        _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    }

and then:

    [bufferWriter->writer startWriting];
    [bufferWriter->writer startSessionAtSourceTime:_startTime];

Your code looks valid in that you compute the elapsed time for each packet you receive. However, AVFoundation computes this for you and also optimizes the timestamps for placement in the interleaved container. Another thing I'm unsure about is that each CMSampleBufferRef for audio can contain more than one data buffer, each with its own PTS. I'm not sure whether setting the PTS automatically adjusts all the other data buffers.
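If you want to check that yourself, Core Media can enumerate the timing of every sample in a buffer. A diagnostic sketch (my addition, not part of the original answer) that logs the PTS of each sample in an audio buffer:

    #import <CoreMedia/CoreMedia.h>
    #import <stdlib.h>

    // Logs the presentation timestamp of every sample in a CMSampleBufferRef,
    // e.g. to see whether an audio buffer carries more than one PTS.
    static void LogSampleTiming(CMSampleBufferRef sampleBuffer) {
        CMItemCount count = CMSampleBufferGetNumSamples(sampleBuffer);
        CMItemCount needed = 0;
        // First call with a NULL array just queries how many entries exist.
        CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &needed);
        CMSampleTimingInfo *timing = malloc(sizeof(CMSampleTimingInfo) * needed);
        if (timing == NULL) return;
        if (CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, needed, timing, NULL) == noErr) {
            for (CMItemCount i = 0; i < needed; i++) {
                NSLog(@"timing entry %ld (of %ld samples): PTS %f", (long)i, (long)count,
                      CMTimeGetSeconds(timing[i].presentationTimeStamp));
            }
        }
        free(timing);
    }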

Where my code differs from yours is that I use a single dispatch queue for both audio and video. In the callback I use (some code removed):

    switch (bufferWriter->writer.status) {
        case AVAssetWriterStatusUnknown:
            if (_startTime.value == 0) {
                _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            }
            [bufferWriter->writer startWriting];
            [bufferWriter->writer startSessionAtSourceTime:_startTime];

            // Break if not ready, otherwise fall through.
            if (bufferWriter->writer.status != AVAssetWriterStatusWriting) {
                break;
            }

        case AVAssetWriterStatusWriting:
            if (captureOutput == self.captureManager.audioOutput) {
                if (!bufferWriter->audioIn.readyForMoreMediaData) {
                    break;
                }
                @try {
                    if (![bufferWriter->audioIn appendSampleBuffer:sampleBuffer]) {
                        [self delegateMessage:@"Audio Writing Error" withType:ERROR];
                    }
                }
                @catch (NSException *e) {
                    NSLog(@"Audio Exception: %@", [e reason]);
                }
            } else if (captureOutput == self.captureManager.videoOutput) {
                if (!bufferWriter->videoIn.readyForMoreMediaData) {
                    break;
                }
                @try {
                    if (!frontCamera) {
                        if (![bufferWriter->videoIn appendSampleBuffer:sampleBuffer]) {
                            [self delegateMessage:@"Video Writing Error" withType:ERROR];
                        }
                    } else {
                        // Front-camera frames are flipped before writing.
                        CMTime pt = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
                        flipBuffer(sampleBuffer, pixelBuffer);
                        if (![bufferWriter->adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pt]) {
                            [self delegateMessage:@"Video Writing Error" withType:ERROR];
                        }
                    }
                }
                @catch (NSException *e) {
                    NSLog(@"Video Exception: %@", [e reason]);
                }
            }
            break;

        case AVAssetWriterStatusCompleted:
            return;

        case AVAssetWriterStatusFailed:
            [self delegateMessage:@"Critical Error Writing Queues" withType:ERROR];
            bufferWriter->writer_failed = YES;
            _broadcastError = YES;
            [self stopCapture];
            return;

        case AVAssetWriterStatusCancelled:
            break;

        default:
            break;
    }
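For completeness, the matching teardown in this single-queue scheme would finish the writer from that same queue, so no callback races the shutdown. A sketch under the same assumed names (bufferWriter, audioIn, videoIn; captureQueue is a hypothetical handle to the serial queue the callbacks run on):

    // Hypothetical stop path for the single-queue design: finish both
    // inputs and the writer itself once capture has stopped.
    dispatch_async(captureQueue, ^{
        if (bufferWriter->writer.status == AVAssetWriterStatusWriting) {
            [bufferWriter->audioIn markAsFinished];
            [bufferWriter->videoIn markAsFinished];
            [bufferWriter->writer finishWriting];
        }
    });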