如何使用Swift在iOS中同时录制和播放音频?

在Objective-C中,同时录制和播放音频非常简单。互联网上有大量的示例代码。但我想在Swift中使用Audio Unit / Core Audio来同时录制和播放音频。可是关于Swift的帮助和示例代码非常少,我找不到任何能说明如何实现这一目标的资料。

我正在为下面的代码而苦恼。

// NOTE(review): This is the question's code, kept verbatim as the artifact being
// debugged — do not "fix" it here; the corrected version is in the answer below.
// Concrete defects visible in it:
//  * Every AudioUnitSetProperty call passes inDataSize = UInt32(sizeof(UInt32)),
//    i.e. 4 bytes — including the kAudioUnitProperty_StreamFormat calls (which
//    need sizeof(AudioStreamBasicDescription)) and the SetInputCallback /
//    SetRenderCallback calls (which need sizeof(AURenderCallbackStruct)).
//    An undersized inDataSize makes the property set fail, which is presumably
//    why playbackCallback never fires — confirm via the checkStatus output.
//  * The statuses of the callback-registering calls and the final
//    ShouldAllocateBuffer call are set but the last one is never checked.
let preferredIOBufferDuration = 0.005 let kInputBus = AudioUnitElement(1) let kOutputBus = AudioUnitElement(0) init() { // This is my Audio Unit settings code. var status: OSStatus do { try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(preferredIOBufferDuration) } catch let error as NSError { print(error) } var desc: AudioComponentDescription = AudioComponentDescription() desc.componentType = kAudioUnitType_Output desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO desc.componentFlags = 0 desc.componentFlagsMask = 0 desc.componentManufacturer = kAudioUnitManufacturer_Apple let inputComponent: AudioComponent = AudioComponentFindNext(nil, &desc) status = AudioComponentInstanceNew(inputComponent, &audioUnit) checkStatus(status) var flag = UInt32(1) status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, UInt32(sizeof(UInt32))) checkStatus(status) status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, UInt32(sizeof(UInt32))) checkStatus(status) var audioFormat: AudioStreamBasicDescription! = AudioStreamBasicDescription() audioFormat.mSampleRate = 8000 audioFormat.mFormatID = kAudioFormatLinearPCM audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked audioFormat.mFramesPerPacket = 1 audioFormat.mChannelsPerFrame = 1 audioFormat.mBitsPerChannel = 16 audioFormat.mBytesPerPacket = 2 audioFormat.mBytesPerFrame = 2 status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, UInt32(sizeof(UInt32))) checkStatus(status) try! 
AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord) status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, UInt32(sizeof(UInt32))) checkStatus(status) // Set input/recording callback var inputCallbackStruct: AURenderCallbackStruct! = AURenderCallbackStruct(inputProc: recordingCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self))) inputCallbackStruct.inputProc = recordingCallback inputCallbackStruct.inputProcRefCon = UnsafeMutablePointer(unsafeAddressOf(self)) status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &inputCallbackStruct, UInt32(sizeof(UInt32))) checkStatus(status) // Set output/renderar/playback callback var renderCallbackStruct: AURenderCallbackStruct! = AURenderCallbackStruct(inputProc: playbackCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self))) renderCallbackStruct.inputProc = playbackCallback renderCallbackStruct.inputProcRefCon = UnsafeMutablePointer(unsafeAddressOf(self)) status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &renderCallbackStruct, UInt32(sizeof(UInt32))) checkStatus(status) flag = 0 status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_ShouldAllocateBuffer, kAudioUnitScope_Output, kInputBus, &flag, UInt32(sizeof(UInt32))) } func recordingCallback(inRefCon: UnsafeMutablePointer, ioActionFlags: UnsafeMutablePointer, inTimeStamp: UnsafePointer, inBufNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer) -> OSStatus { print("recordingCallback got fired >>>") return noErr } func playbackCallback(inRefCon: UnsafeMutablePointer, ioActionFlags: UnsafeMutablePointer, inTimeStamp: UnsafePointer, inBufNumber: UInt32, inNumberFrames: UInt32, ioData: UnsafeMutablePointer) -> OSStatus { print("playbackCallback got fired <<<") return noErr } 

使用该代码,只有recordingCallback方法会被调用,而playbackCallback方法根本不会被触发。我肯定是哪里出错了,希望有人能帮帮我——这个问题让我伤透了脑筋。

您设置InputCallback和RenderCallback的方式有误,其他设置看起来都没问题。所以你的init方法应该像下面这样。

 init() { var status: OSStatus do { try AVAudioSession.sharedInstance().setPreferredIOBufferDuration(preferredIOBufferDuration) } catch let error as NSError { print(error) } var desc: AudioComponentDescription = AudioComponentDescription() desc.componentType = kAudioUnitType_Output desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO desc.componentFlags = 0 desc.componentFlagsMask = 0 desc.componentManufacturer = kAudioUnitManufacturer_Apple let inputComponent: AudioComponent = AudioComponentFindNext(nil, &desc) status = AudioComponentInstanceNew(inputComponent, &audioUnit) checkStatus(status) var flag = UInt32(1) status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, UInt32(sizeof(UInt32))) checkStatus(status) status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, UInt32(sizeof(UInt32))) checkStatus(status) var audioFormat: AudioStreamBasicDescription! = AudioStreamBasicDescription() audioFormat.mSampleRate = 8000 audioFormat.mFormatID = kAudioFormatLinearPCM audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked audioFormat.mFramesPerPacket = 1 audioFormat.mChannelsPerFrame = 1 audioFormat.mBitsPerChannel = 16 audioFormat.mBytesPerPacket = 2 audioFormat.mBytesPerFrame = 2 status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, UInt32(sizeof(UInt32))) checkStatus(status) try! 
AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayAndRecord) status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, UInt32(sizeof(UInt32))) checkStatus(status) // Set input/recording callback var inputCallbackStruct = AURenderCallbackStruct(inputProc: recordingCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self))) AudioUnitSetProperty(audioUnit, AudioUnitPropertyID(kAudioOutputUnitProperty_SetInputCallback), AudioUnitScope(kAudioUnitScope_Global), 1, &inputCallbackStruct, UInt32(sizeof(AURenderCallbackStruct))) // Set output/renderar/playback callback var renderCallbackStruct = AURenderCallbackStruct(inputProc: playbackCallback, inputProcRefCon: UnsafeMutablePointer(unsafeAddressOf(self))) AudioUnitSetProperty(audioUnit, AudioUnitPropertyID(kAudioUnitProperty_SetRenderCallback), AudioUnitScope(kAudioUnitScope_Global), 0, &renderCallbackStruct, UInt32(sizeof(AURenderCallbackStruct))) flag = 0 status = AudioUnitSetProperty(audioUnit, kAudioUnitProperty_ShouldAllocateBuffer, kAudioUnitScope_Output, kInputBus, &flag, UInt32(sizeof(UInt32))) } 

尝试使用此代码,如果有帮助,请告诉我们。