Recording and playing audio using AVAssetWriter

I've boiled this problem down and am hoping to get some help.

Basically, the class has two methods: one to start recording audio (-recordMode) and one to play back the audio (-playMode). I currently have this class in a project with a single view controller with two buttons that call the corresponding methods (record, play) on the class. There are no other variables; the class is self-contained.

However, it won't play anything back, and I can't figure out why. When I try to play the file, I get a file size of 0 and an error, because you can't initialize an AVAudioPlayer with a nil reference. But I don't understand why the file is empty, or why self.outputPath would be nil.

.h file:

    #import <AVFoundation/AVFoundation.h>

    @interface MicCommunicator : NSObject <AVCaptureAudioDataOutputSampleBufferDelegate>

    @property (nonatomic, retain) NSURL *outputPath;
    @property (nonatomic, retain) AVCaptureSession *captureSession;
    @property (nonatomic, retain) AVCaptureAudioDataOutput *output;

    -(void)beginStreaming;
    -(void)playMode;
    -(void)recordMode;

    @end

.m file:

    @implementation MicCommunicator {
        AVAssetWriter *assetWriter;
        AVAssetWriterInput *assetWriterInput;
    }

    @synthesize captureSession = _captureSession;
    @synthesize output = _output;
    @synthesize outputPath = _outputPath;

    -(id)init {
        if ((self = [super init])) {
            NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
            self.outputPath = [NSURL fileURLWithPath:[[searchPaths objectAtIndex:0] stringByAppendingPathComponent:@"micOutput.output"]];

            AudioChannelLayout acl;
            bzero(&acl, sizeof(acl));
            acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; //kAudioChannelLayoutTag_Stereo;

            NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                [NSNumber numberWithInt:kAudioFormatULaw], AVFormatIDKey,
                [NSNumber numberWithFloat:8000.0], AVSampleRateKey, //was 44100.0
                [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)], AVChannelLayoutKey,
                [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                [NSNumber numberWithInt:8000.0], AVEncoderBitRateKey,
                nil];

            assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
            [assetWriterInput setExpectsMediaDataInRealTime:YES];

            assetWriter = [[AVAssetWriter assetWriterWithURL:_outputPath fileType:AVFileTypeWAVE error:nil] retain];
            [assetWriter addInput:assetWriterInput];
        }
        return self;
    }

    -(void)dealloc {
        [assetWriter release];
        [super dealloc];
    }

    //convenience methods
    -(void)playMode {
        [self stopRecording];

        NSError *error;
        AVAudioPlayer *audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:self.outputPath error:&error];
        audioPlayer.numberOfLoops = -1;

        if (audioPlayer == nil) {
            NSLog(@"error: %@", [error description]);
        } else {
            NSLog(@"playing");
            [audioPlayer play];
        }
    }

    -(void)recordMode {
        [self beginStreaming];
    }

    -(void)stopRecording {
        [self.captureSession stopRunning];
        [assetWriterInput markAsFinished];
        [assetWriter finishWriting];
        NSDictionary *outputFileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:[NSString stringWithFormat:@"%@", self.outputPath] error:nil];
        NSLog(@"done. file size is %llu", [outputFileAttributes fileSize]);
    }

    //starts audio recording
    -(void)beginStreaming {
        self.captureSession = [[AVCaptureSession alloc] init];
        AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        NSError *error = nil;
        AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
        if (audioInput)
            [self.captureSession addInput:audioInput];
        else {
            NSLog(@"No audio input found.");
            return;
        }

        AVCaptureAudioDataOutput *output = [[AVCaptureAudioDataOutput alloc] init];
        dispatch_queue_t outputQueue = dispatch_queue_create("micOutputDispatchQueue", NULL);
        [output setSampleBufferDelegate:self queue:outputQueue];
        dispatch_release(outputQueue);
        [self.captureSession addOutput:output];

        [assetWriter startWriting];
        [self.captureSession startRunning];
    }

    //callback
    -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
        AudioBufferList audioBufferList;
        NSMutableData *data = [[NSMutableData alloc] init];
        CMBlockBufferRef blockBuffer;
        CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer, NULL, &audioBufferList, sizeof(audioBufferList), NULL, NULL, 0, &blockBuffer);

        //for (int y = 0; y < audioBufferList.mNumberBuffers; y++) {
        //    AudioBuffer audioBuffer = audioBufferList.mBuffers[y];
        //    Float32 *frame = (Float32 *)audioBuffer.mData;
        //
        //    [data appendBytes:frame length:audioBuffer.mDataByteSize];
        //}

        // append [data bytes] to your NSOutputStream

        // These two lines write to disk, you may not need this, just providing an example
        [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        [assetWriterInput appendSampleBuffer:sampleBuffer];

        CFRelease(blockBuffer);
        blockBuffer = NULL;
        [data release];
    }

    @end
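One side issue worth ruling out first: -attributesOfItemAtPath: expects a filesystem path, but [NSString stringWithFormat:@"%@", self.outputPath] produces a file:// URL string, so the size check in -stopRecording can report 0 even when the file exists. A corrected check might look like this (a sketch, not the root cause per the answer below):

    // Pass a filesystem path, not a URL string; -[NSURL path] strips the file:// scheme.
    NSDictionary *outputFileAttributes =
        [[NSFileManager defaultManager] attributesOfItemAtPath:[self.outputPath path]
                                                         error:nil];
    NSLog(@"done. file size is %llu", [outputFileAttributes fileSize]);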

Per Apple support:

So this is the error: the file is created, some samples are written successfully, and then appending starts to fail for no apparent reason.
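To observe this in code, you can check the return value of -appendSampleBuffer: and inspect the writer's status and error once an append fails. A minimal sketch of an instrumented write step for the callback above (sessionStarted is a hypothetical BOOL ivar, not part of the original class; note also that -startSessionAtSourceTime: is meant to be called once per session, while the callback above calls it for every buffer):

    if (!sessionStarted) {
        // start the session exactly once, at the first buffer's timestamp
        [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        sessionStarted = YES;
    }
    if (assetWriterInput.readyForMoreMediaData) {
        if (![assetWriterInput appendSampleBuffer:sampleBuffer]) {
            // once the writer reaches AVAssetWriterStatusFailed, every later append fails too
            NSLog(@"append failed, writer status %d, error: %@",
                  (int)assetWriter.status, assetWriter.error);
        }
    }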

It appears that AVAssetWriter only fails with these particular settings.
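For comparison, an output-settings dictionary that AVAssetWriter generally does accept for AVFileTypeWAVE is plain linear PCM. This is a sketch of a possible workaround under that assumption, not the fix from the original exchange:

    // Uncompressed 16-bit mono linear PCM at 8 kHz; no AVEncoderBitRateKey for LPCM
    NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithInt:kAudioFormatLinearPCM], AVFormatIDKey,
        [NSNumber numberWithFloat:8000.0], AVSampleRateKey,
        [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
        [NSNumber numberWithInt:16], AVLinearPCMBitDepthKey,
        [NSNumber numberWithBool:NO], AVLinearPCMIsFloatKey,
        [NSNumber numberWithBool:NO], AVLinearPCMIsBigEndianKey,
        [NSNumber numberWithBool:NO], AVLinearPCMIsNonInterleaved,
        nil];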

AudioQueue is what should be used for ulaw audio.
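For reference, here is a minimal sketch of what a µ-law input queue looks like with Audio Queue Services. HandleInputBuffer and StartULawInputQueue are placeholder names; error handling and the actual file-writing step (e.g. AudioFileWritePackets) are omitted:

    #import <AudioToolbox/AudioToolbox.h>

    // Placeholder input callback: a real implementation would persist
    // inBuffer->mAudioData before handing the buffer back to the queue.
    static void HandleInputBuffer(void *inUserData,
                                  AudioQueueRef inAQ,
                                  AudioQueueBufferRef inBuffer,
                                  const AudioTimeStamp *inStartTime,
                                  UInt32 inNumPackets,
                                  const AudioStreamPacketDescription *inPacketDesc) {
        AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
    }

    static AudioQueueRef StartULawInputQueue(void) {
        // 8 kHz mono µ-law: 8 bits, i.e. one byte, per sample
        AudioStreamBasicDescription format = {0};
        format.mSampleRate       = 8000.0;
        format.mFormatID         = kAudioFormatULaw;
        format.mChannelsPerFrame = 1;
        format.mBitsPerChannel   = 8;
        format.mBytesPerFrame    = 1;
        format.mFramesPerPacket  = 1;
        format.mBytesPerPacket   = 1;

        AudioQueueRef queue = NULL;
        AudioQueueNewInput(&format, HandleInputBuffer, NULL, NULL, NULL, 0, &queue);

        // Prime a few buffers so the queue has somewhere to record into
        for (int i = 0; i < 3; i++) {
            AudioQueueBufferRef buffer;
            AudioQueueAllocateBuffer(queue, 4096, &buffer);
            AudioQueueEnqueueBuffer(queue, buffer, 0, NULL);
        }
        AudioQueueStart(queue, NULL);
        return queue;
    }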