I'm using AudioKit 4.8 like this:
class AudioKitWrapper {

    /// The main output mixer (after the amplitude tracker)
    private let masterMixer: AKMixer

    /// The audio input
    private var microphone: EZMicrophone?

    /// Construction
    init() {
        // global settings
        AKAudioFile.cleanTempDirectory()
        AKSettings.defaultToSpeaker = true
        AKSettings.enableRouteChangeHandling = true
        AKSettings.enableCategoryChangeHandling = true
        AKSettings.disableAVAudioSessionCategoryManagement = true
        AKSettings.audioInputEnabled = true
        AKSettings.playbackWhileMuted = false
        #if DEBUG
        AKSettings.enableLogging = AppConfig.AudioConfig.debugLogEnabled
        #endif

        // main mixer
        masterMixer = AKMixer()
    }

    /// Start up AudioKit
    func startEngine(with audioInput: Bool) throws {
        // connect main nodes
        AudioKit.output = masterMixer

        // input
        microphone?.delegate = nil
        microphone = nil
        if audioInput {
            AKSettings.enableEchoCancellation = true
            let sizeOfFloat = UInt32(MemoryLayout<Float>.stride)
            microphone = EZMicrophone(
                microphoneDelegate: self,
                with: AudioStreamBasicDescription(
                    mSampleRate: Float64(mirgaFactory.getSampleRate()),
                    mFormatID: kAudioFormatLinearPCM,
                    mFormatFlags: kLinearPCMFormatFlagIsFloat |
                        kAudioFormatFlagsNativeEndian |
                        kAudioFormatFlagIsPacked |
                        kAudioFormatFlagIsNonInterleaved,
                    mBytesPerPacket: sizeOfFloat,
                    mFramesPerPacket: 1,
                    mBytesPerFrame: sizeOfFloat,
                    mChannelsPerFrame: 1,
                    mBitsPerChannel: sizeOfFloat * 8,
                    mReserved: 0),
                startsImmediately: false)
        }

        // start
        try AKSettings.session.setCategory(.playAndRecord, mode: .measurement, options: .defaultToSpeaker)
        try AudioKit.start()
        microphone?.startFetchingAudio()
        Log.d("~~~ AudioKit: started")
    }

    /// Stop engine
    func stopEngine() throws {
        try AudioKit.stop()
        microphone?.stopFetchingAudio()
        Log.d("~~~ AudioKit: stopped")
    }

    /// Attach output
    func attach(audioPlayer: AKAudioPlayer) {
        audioPlayer >>> masterMixer
    }

    /// Reset
    func reset() {
        masterMixer.detach()
        Log.d("~~~ AudioKit: reset")
    }
}
/// Handle EZMicrophone input
extension AudioKitWrapper: EZMicrophoneDelegate {

    /// Playing state changed
    func microphone(_ microphone: EZMicrophone!, changedPlayingState isPlaying: Bool) {
        Log.i("playing state - \(isPlaying)")
    }

    /// Device changed
    func microphone(_ microphone: EZMicrophone!, changedDevice device: EZAudioDevice!) {
        Log.i("device - \(String(describing: device))")
    }

    /// Audio stream description
    func microphone(_ microphone: EZMicrophone!, hasAudioStreamBasicDescription audioStreamBasicDescription: AudioStreamBasicDescription) {
        Log.i("stream - \(audioStreamBasicDescription)")
    }

    /// Input buffer handling
    func microphone(_ microphone: EZMicrophone!,
                    hasAudioReceived buffer: UnsafeMutablePointer<UnsafeMutablePointer<Float>?>!,
                    withBufferSize bufferSize: UInt32,
                    withNumberOfChannels numberOfChannels: UInt32,
                    atTime timestamp: UnsafePointer<AudioTimeStamp>!) {
        writeSamplesIntoCircularBuffer(buffer[0]!, bufferSize, timestamp)
    }
}
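For context, here is a minimal sketch of how the wrapper above is driven; the call site, the audioInput flag value, and the error handling are illustrative assumptions rather than the actual app code.

// Illustrative call site only (relies on the AudioKitWrapper defined above)
let audioWrapper = AudioKitWrapper()
do {
    // start the engine with microphone input enabled
    try audioWrapper.startEngine(with: true)
} catch {
    print("AudioKit failed to start: \(error)")
}
// ... later, when audio is no longer needed
try? audioWrapper.stopEngine()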
iOS 13 - iPhone 7
It crashes with the following log:
2019-10-11 09:37:01.917118+0200 WyntonHost[555:135079] AUBuffer.h:61:GetBufferList: EXCEPTION (-1) [mPtrState == kPtrsInvalid is false]: ""
2019-10-11 09:37:01.938441+0200 WyntonHost[555:135079] AUBuffer.h:61:GetBufferList: EXCEPTION (-1) [mPtrState == kPtrsInvalid is false]: ""
Error: Failed to fill complex buffer in float converter ('insz')
Process finished with exit code 255
iOS 13 - iPad Air 2
I get these log messages without a crash (hundreds of them, I assume one per audio frame):
2019-10-11 09:34:07.187762+0200 WyntonHost[376:18541] AUBuffer.h:61:GetBufferList: EXCEPTION (-1) [mPtrState == kPtrsInvalid is false]: ""
2019-10-11 09:34:07.211279+0200 WyntonHost[376:18541] AUBuffer.h:61:GetBufferList: EXCEPTION (-1) [mPtrState == kPtrsInvalid is false]: ""
iOS 12 - iPhone 6
I get these log messages without a crash as well (again hundreds of them, presumably one per audio frame):
2019-10-11 09:31:59.138013+0200 WyntonHost[537:96101] 55: EXCEPTION (-1): ""
2019-10-11 09:31:59.161233+0200 WyntonHost[537:96101] 55: EXCEPTION (-1): ""
Any idea where this message is coming from?