I hope someone can help me. I'm new to Objective-C and OS X, and I'm trying to play audio data that I receive over a socket through my audio unit. I found this answer, https://stackoverflow.com/a/30318859/4274654, which addresses my problem with a circular buffer.
However, when I run my project, the following call returns OSStatus error -10865 (which appears to be kAudioUnitErr_PropertyNotWritable), and that is why the code logs "Error enabling AudioUnit output bus":
status = AudioUnitSetProperty(_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &one, sizeof(one));
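For reference, here is a small diagnostic sketch (it reuses _audioUnit and kOutputBus from the code below, so treat it as a sketch rather than part of my setup) that asks Core Audio whether the property is writable on that scope and element at all:

UInt32 size = 0;
Boolean writable = false;
// Diagnostic only: ask whether kAudioOutputUnitProperty_EnableIO can be
// written on the output scope of the output element before setting it.
OSStatus infoStatus = AudioUnitGetPropertyInfo(_audioUnit,
                                               kAudioOutputUnitProperty_EnableIO,
                                               kAudioUnitScope_Output,
                                               kOutputBus,
                                               &size,
                                               &writable);
NSLog(@"EnableIO info: status=%d, size=%u, writable=%d",
      (int)infoStatus, (unsigned)size, (int)writable);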
Here is my code:
Test.h
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import "TPCircularBuffer.h"
@interface Test : Communicator
@property (nonatomic) AudioComponentInstance audioUnit;
@property (nonatomic) TPCircularBuffer circularBuffer;
-(TPCircularBuffer *) outputShouldUseCircularBuffer;
-(void) start;
@end
Test.m
#import "Test.h"
#define kOutputBus 0
#define kInputBus 1
@implementation Test {
    BOOL stopped;
}
static OSStatus OutputRenderCallback(void                       *inRefCon,
                                     AudioUnitRenderActionFlags *ioActionFlags,
                                     const AudioTimeStamp       *inTimeStamp,
                                     UInt32                      inBusNumber,
                                     UInt32                      inNumberFrames,
                                     AudioBufferList            *ioData) {
    Test *output = (__bridge Test *)inRefCon;
    TPCircularBuffer *circularBuffer = [output outputShouldUseCircularBuffer];

    // No circular buffer yet: output silence (the stream format is 16-bit
    // integer, so zeroing the bytes is enough).
    if (!circularBuffer) {
        memset(ioData->mBuffers[0].mData, 0, ioData->mBuffers[0].mDataByteSize);
        return noErr;
    }

    int32_t bytesToCopy = ioData->mBuffers[0].mDataByteSize;
    SInt16 *outputBuffer = ioData->mBuffers[0].mData;

    uint32_t availableBytes;
    SInt16 *sourceBuffer = TPCircularBufferTail(circularBuffer, &availableBytes);

    // Copy as much as is available, then consume it from the ring buffer.
    int32_t amount = MIN(bytesToCopy, availableBytes);
    memcpy(outputBuffer, sourceBuffer, amount);
    TPCircularBufferConsume(circularBuffer, amount);

    // If the ring buffer ran dry, pad the rest of the output with silence.
    if (amount < bytesToCopy) {
        memset((char *)outputBuffer + amount, 0, bytesToCopy - amount);
    }

    return noErr;
}
- (void)start
{
    [self circularBuffer:&_circularBuffer withSize:24576 * 5];
    stopped = NO;
    [self setupAudioUnit];

    // Actually start rendering once the unit is set up.
    OSStatus status = AudioOutputUnitStart(_audioUnit);
    if (status != noErr) {
        NSLog(@"Error starting audio unit");
    }
    // [super setup:@"http://localhost" port:5321];
}
- (void)setupAudioUnit
{
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;

    AudioComponent comp = AudioComponentFindNext(NULL, &desc);

    OSStatus status;
    status = AudioComponentInstanceNew(comp, &_audioUnit);
    if (status != noErr) {
        NSLog(@"Error creating AudioUnit instance");
    }

    // Enable output on the I/O unit.
    // Output is enabled on the output scope of the output element.
    UInt32 one = 1;
    status = AudioUnitSetProperty(_audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &one, sizeof(one));
    if (status != noErr) {
        NSLog(@"Error enabling AudioUnit output bus");   // this is where I get -10865
    }

    // Explicitly set the output client format:
    // sample rate = 44100, num channels = 1, format = 16-bit signed integer.
    AudioStreamBasicDescription audioFormat = [self getAudioDescription];
    status = AudioUnitSetProperty(_audioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &audioFormat, sizeof(audioFormat));
    if (status != noErr) {
        NSLog(@"Error setting audio format");
    }

    // Render callback that pulls audio out of the circular buffer.
    AURenderCallbackStruct renderCallback;
    renderCallback.inputProc = OutputRenderCallback;
    renderCallback.inputProcRefCon = (__bridge void *)(self);
    status = AudioUnitSetProperty(_audioUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &renderCallback, sizeof(renderCallback));
    if (status != noErr) {
        NSLog(@"Error setting rendering callback");
    }

    // Initialize the audio unit.
    status = AudioUnitInitialize(_audioUnit);
    if (status != noErr) {
        NSLog(@"Error initializing audio unit");
    }
}
- (AudioStreamBasicDescription)getAudioDescription {
    AudioStreamBasicDescription audioDescription = {0};
    audioDescription.mFormatID = kAudioFormatLinearPCM;
    audioDescription.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked | kAudioFormatFlagsNativeEndian;
    audioDescription.mChannelsPerFrame = 1;
    audioDescription.mBytesPerPacket = sizeof(SInt16) * audioDescription.mChannelsPerFrame;
    audioDescription.mFramesPerPacket = 1;
    audioDescription.mBytesPerFrame = sizeof(SInt16) * audioDescription.mChannelsPerFrame;
    audioDescription.mBitsPerChannel = 8 * sizeof(SInt16);
    audioDescription.mSampleRate = 44100.0;
    return audioDescription;
}
- (void)circularBuffer:(TPCircularBuffer *)circularBuffer withSize:(int)size {
    TPCircularBufferInit(circularBuffer, size);
}

- (void)appendDataToCircularBuffer:(TPCircularBuffer *)circularBuffer
               fromAudioBufferList:(AudioBufferList *)audioBufferList {
    TPCircularBufferProduceBytes(circularBuffer,
                                 audioBufferList->mBuffers[0].mData,
                                 audioBufferList->mBuffers[0].mDataByteSize);
}

- (void)freeCircularBuffer:(TPCircularBuffer *)circularBuffer {
    TPCircularBufferClear(circularBuffer);
    TPCircularBufferCleanup(circularBuffer);
}

- (TPCircularBuffer *)outputShouldUseCircularBuffer
{
    return &_circularBuffer;
}
- (void)stop
{
    OSStatus status = AudioOutputUnitStop(_audioUnit);
    if (status != noErr) {
        NSLog(@"Error stopping audio unit");
    }

    TPCircularBufferClear(&_circularBuffer);

    // Dispose of the unit instead of just dropping the reference.
    AudioComponentInstanceDispose(_audioUnit);
    _audioUnit = NULL;
    stopped = YES;
}
- (void)stream:(NSStream *)stream handleEvent:(NSStreamEvent)event {
    switch (event) {
        case NSStreamEventOpenCompleted:
            NSLog(@"Stream opened");
            break;
        case NSStreamEventHasBytesAvailable:
            if (stream == [super inputStream]) {
                NSLog(@"NSStreamEventHasBytesAvailable");
                uint8_t buffer[1024];
                NSInteger len;
                while ([[super inputStream] hasBytesAvailable]) {
                    len = [[super inputStream] read:buffer maxLength:sizeof(buffer)];
                    if (len > 0) {
                        // Converting the buffer to byte data.
                        NSString *output = [[NSString alloc] initWithBytes:buffer length:len encoding:NSASCIIStringEncoding];
                        if (nil != output) {
                            //NSLog(@"server overideddddd said: %@", output);
                        }
                        NSData *data0 = [[NSData alloc] initWithBytes:buffer length:len];
                        if (nil != data0) {
                            // Rough level check on the incoming 16-bit samples (the value is currently unused).
                            SInt16 *byteData = (SInt16 *)malloc(len);
                            memcpy(byteData, [data0 bytes], len);
                            double sum = 0.0;
                            for (int i = 0; i < len / 2; i++) {
                                sum += byteData[i] * byteData[i];
                            }
                            free(byteData);

                            Byte *soundData = (Byte *)malloc(len);
                            memcpy(soundData, [data0 bytes], len);
                            if (soundData) {
                                AudioBufferList *theDataBuffer = (AudioBufferList *)malloc(sizeof(AudioBufferList));
                                theDataBuffer->mNumberBuffers = 1;
                                theDataBuffer->mBuffers[0].mDataByteSize = (UInt32)len;
                                theDataBuffer->mBuffers[0].mNumberChannels = 1;
                                theDataBuffer->mBuffers[0].mData = (SInt16 *)soundData;
                                NSLog(@"soundData here");
                                // Copy the bytes into the ring buffer; the render
                                // callback consumes them on the audio thread.
                                [self appendDataToCircularBuffer:&_circularBuffer fromAudioBufferList:theDataBuffer];
                                // TPCircularBufferProduceBytes copies the data, so
                                // the temporary allocations can be released here.
                                free(theDataBuffer);
                                free(soundData);
                            }
                        }
                    }
                }
            }
            break;
        case NSStreamEventErrorOccurred:
            NSLog(@"Can't connect to server");
            break;
        case NSStreamEventEndEncountered:
            [stream close];
            [stream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
            break;
        default:
            NSLog(@"Unknown event");
    }
    [super stream:stream handleEvent:event];
}
@end
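As a side note, since TPCircularBufferProduceBytes copies the bytes itself, I suppose the append path could be reduced to something like the sketch below (appendBytesToCircularBuffer: is just a name I made up, and it assumes the socket delivers raw 16-bit, 44.1 kHz mono PCM, matching the stream format above):

// Hypothetical helper: push raw PCM bytes from the socket straight into the
// ring buffer, without building an AudioBufferList on the heap first.
- (void)appendBytesToCircularBuffer:(const void *)bytes length:(UInt32)length {
    TPCircularBufferProduceBytes(&_circularBuffer, bytes, length);
}

It would be called from the NSStreamEventHasBytesAvailable branch with the bytes read from the stream, but I haven't verified that this is any better than the AudioBufferList approach above.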
I would really appreciate it if someone had an example of playing buffers received from a socket server through an audio unit, so that I can hear the sound coming from the socket server.
Thanks