CMSampleBuffer from Replay Kit: after encoding to MPEG-TS the video is fine, but the audio is corrupted
0 votes / 18 April 2019

I am trying to stream AAC audio that I encode from data received as CMSampleBuffers. AudioConverterFillComplexBuffer returns status code 0 (noErr), but after I pass the encoded data to my FFmpeg HLSWriter the audio is not saved correctly (short, choppy bursts of sound). Sample code is below.

    static OSStatus inInputDataProc(AudioConverterRef inAudioConverter,
                                    UInt32 *ioNumberDataPackets,
                                    AudioBufferList *ioData,
                                    AudioStreamPacketDescription **outDataPacketDescription,
                                    void *inUserData)
    {
        KFAACEncoder *encoder = (__bridge KFAACEncoder *)(inUserData);
        UInt32 requestedPackets = *ioNumberDataPackets;

        // Not enough buffered PCM yet: report zero packets and bail out.
        if (requestedPackets > encoder.cycledBuffer.size / 2)
        {
            //NSLog(@"PCM buffer isn't full enough!");
            *ioNumberDataPackets = 0;
            return -1;
        }

        // Reusable scratch buffer the converter reads the PCM from.
        static size_t staticBuffSize = 4096;
        static void *staticBuff = nil;

        if (!staticBuff)
        {
            staticBuff = malloc(staticBuffSize);
        }

        // Pop the requested amount of PCM (2 bytes per packet) into the scratch buffer.
        size_t outputBytesSize = requestedPackets * 2;
        [encoder.cycledBuffer popToBuffer:staticBuff bytes:outputBytesSize];
        ioData->mBuffers[0].mData = staticBuff;
        ioData->mBuffers[0].mDataByteSize = (int)outputBytesSize;

        // Tell the converter how many packets were actually provided.
        *ioNumberDataPackets = ioData->mBuffers[0].mDataByteSize / 2;

        return noErr;
    }
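For reference, the callback above hard-codes 2 bytes per PCM packet, which only holds for 16-bit mono input. A minimal sketch of deriving that size from the incoming sample buffer's format instead, done once where the converter is set up; `_pcmBytesPerPacket` is a hypothetical ivar, not part of the code above:

    // Sketch: read the input stream description once (e.g. in setupAACEncoderFromSampleBuffer:)
    // and cache its packet size instead of assuming 2 bytes (16-bit mono).
    // _pcmBytesPerPacket is a hypothetical ivar, not part of the code above.
    const AudioStreamBasicDescription *asbd =
        CMAudioFormatDescriptionGetStreamBasicDescription(CMSampleBufferGetFormatDescription(sampleBuffer));
    if (asbd)
    {
        _pcmBytesPerPacket = asbd->mBytesPerPacket; // e.g. 4 for 16-bit interleaved stereo
    }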

    - (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer
    {
        CFRetain(sampleBuffer);
        dispatch_async(self.encoderQueue, ^{

            // Lazily create the AAC converter from the first sample buffer's format.
            if (!_audioConverter)
            {
                [self setupAACEncoderFromSampleBuffer:sampleBuffer];
            }

            CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            CFRetain(blockBuffer);

            // Get a pointer to the raw PCM bytes and push them into the ring buffer
            // that inInputDataProc reads from.
            size_t pcmBufferSize = 0;
            void *pcmBuffer = nil;
            OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &pcmBufferSize, &pcmBuffer);

            [_cycledBuffer push:pcmBuffer size:pcmBufferSize];

            NSError *error = nil;

            if (status != kCMBlockBufferNoErr)
            {
                error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
            }

            // Prepare the output buffer list that receives one encoded AAC packet.
            memset(_aacBuffer, 0, _aacBufferSize);
            AudioBufferList outAudioBufferList = {0};
            outAudioBufferList.mNumberBuffers = 1;
            outAudioBufferList.mBuffers[0].mNumberChannels = 1;
            outAudioBufferList.mBuffers[0].mDataByteSize = _aacBufferSize;
            outAudioBufferList.mBuffers[0].mData = _aacBuffer;

            AudioStreamPacketDescription *outPacketDescription = NULL;
            UInt32 ioOutputDataPacketSize = 1;
            status = AudioConverterFillComplexBuffer(_audioConverter,
                                                     inInputDataProc,
                                                     (__bridge void *)(self),
                                                     &ioOutputDataPacketSize,
                                                     &outAudioBufferList,
                                                     NULL);

            NSData *data = nil;
            if (status == 0)
            {
                NSData *rawAAC = [NSData dataWithBytes:outAudioBufferList.mBuffers[0].mData
                                                length:outAudioBufferList.mBuffers[0].mDataByteSize];
                if (_addADTSHeader)
                {
                    // Prepend an ADTS header so the raw AAC packet can be carried in MPEG-TS.
                    NSData *adtsHeader = [self adtsDataForPacketLength:rawAAC.length];
                    NSMutableData *fullData = [NSMutableData dataWithData:adtsHeader];
                    [fullData appendData:rawAAC];
                    data = fullData;
                }
                else
                {
                    data = rawAAC;
                }
            }
            else
            {
                error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
            }

            // Hand the encoded frame to the delegate (here, the FFmpeg HLS writer).
            if (self.delegate)
            {
                KFFrame *frame = [[KFFrame alloc] initWithData:data pts:pts];
                NSLog(@"Bytes of data %lu", (unsigned long)data.length);
                dispatch_async(self.callbackQueue, ^{
                    [self.delegate encoder:self encodedFrame:frame];
                });
            }

            CFRelease(sampleBuffer);
            CFRelease(blockBuffer);
        });
    }
...
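The `adtsDataForPacketLength:` helper used above is not shown in the post; a typical sketch looks like the following, assuming AAC-LC output. The frequency index and channel configuration below (44.1 kHz, mono) are assumptions and must match the converter's output format:

    // Sketch: build the 7-byte ADTS header for one AAC-LC packet.
    // freqIdx and chanCfg are assumptions; they must match the encoder's output ASBD.
    - (NSData *)adtsDataForPacketLength:(NSUInteger)packetLength
    {
        const NSUInteger adtsLength = 7;
        const NSUInteger fullLength = adtsLength + packetLength;
        int profile = 2;   // AAC LC
        int freqIdx = 4;   // 44.1 kHz
        int chanCfg = 1;   // mono
        uint8_t *packet = malloc(adtsLength);
        packet[0] = 0xFF;
        packet[1] = 0xF9;
        packet[2] = (uint8_t)(((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
        packet[3] = (uint8_t)(((chanCfg & 3) << 6) + (fullLength >> 11));
        packet[4] = (uint8_t)((fullLength & 0x7FF) >> 3);
        packet[5] = (uint8_t)(((fullLength & 7) << 5) + 0x1F);
        packet[6] = 0xFC;
        return [NSData dataWithBytesNoCopy:packet length:adtsLength freeWhenDone:YES];
    }

The header is prepended to every encoded AAC packet, which is the form an MPEG-TS muxer typically expects for ADTS AAC.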