AVAssetWriter не работает со звуком - PullRequest
3 голосов
/ 05 марта 2011

Я пытаюсь заставить звук работать вместе с видео в приложении для iOS. С видео всё в порядке, но звук в файл не записывается (при этом динамик моего iPhone работает).

Вот настройки init:

// --- One-time capture session setup: camera + microphone, preview at Medium preset ---
session = [[AVCaptureSession alloc] init];
    menu->session = session;
    menu_open = NO;
    session.sessionPreset = AVCaptureSessionPresetMedium;
    // Default devices: back camera and built-in microphone.
    camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    menu->camera = camera;
    [session beginConfiguration];
    // NOTE(review): error:nil / lockForConfiguration:nil is used throughout,
    // so any device-configuration failure below is silently ignored.
    [camera lockForConfiguration:nil];
    // Enable continuous auto exposure/focus/white balance where supported.
    if([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){
        camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    if([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]){
        camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    if([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]){
        camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
    }
    // Torch is forced on whenever the device has one.
    if ([camera hasTorch]) {
        if([camera isTorchModeSupported:AVCaptureTorchModeOn]){
            [camera setTorchMode:AVCaptureTorchModeOn];
        }
    }
    [camera unlockForConfiguration];
    [session commitConfiguration];
    // The camera input is added now; the microphone input is only created
    // here and gets added to the session later, when recording starts.
    AVCaptureDeviceInput * camera_input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
    [session addInput:camera_input];
    microphone_input = [[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil] retain];
    // Request BGRA pixel buffers from the video data output.
    AVCaptureVideoDataOutput * output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
    output.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    [session addOutput:output];
    // Cap delivery at 30 fps. NOTE(review): minFrameDuration on the output is
    // deprecated in later SDKs in favour of the AVCaptureConnection property —
    // confirm the deployment target.
    output.minFrameDuration = CMTimeMake(1,30);
    // Video frames are delivered to self on a private serial queue.
    dispatch_queue_t queue = dispatch_queue_create("MY QUEUE", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
    // NOTE(review): alloc already returns an owned (+1) reference, so the
    // extra retain leaves audio_output over-retained — likely a leak.
    audio_output = [[[AVCaptureAudioDataOutput alloc] init] retain];
    queue = dispatch_queue_create("MY QUEUE", NULL);
    // Audio buffers go to a dedicated delegate object that forwards them
    // back to self (see AudioOutputBufferDelegate).
    AudioOutputBufferDelegate * special_delegate = [[[AudioOutputBufferDelegate alloc] init] autorelease];
    special_delegate->normal_delegate = self;
    [special_delegate retain];
    [audio_output setSampleBufferDelegate:special_delegate queue:queue];
    dispatch_release(queue);
    [session startRunning];

Вот начало и конец записи:

// Toggle video recording: when already recording, finish the writer and save
// the file; otherwise attach the audio route and spin up a fresh writer.
if (recording) { //Hence stop recording
    [video_button setTitle:@"Video" forState: UIControlStateNormal];
    recording = NO;
    [writer_input markAsFinished];
    [audio_writer_input markAsFinished];
    // NOTE(review): the end time uses a timescale of 30 (~33 ms resolution) —
    // coarse for an audio-bearing file; confirm this is intended.
    [video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];
    [video_writer finishWriting];
    UISaveVideoAtPathToSavedPhotosAlbum(temp_url,self,@selector(video:didFinishSavingWithError:contextInfo:),nil);
    [start_time release];
    [temp_url release];
    [av_adaptor release];
    // Detach the microphone input/output again now that recording is done.
    [microphone lockForConfiguration:nil];
    [session beginConfiguration];
    [session removeInput:microphone_input];
    [session removeOutput:audio_output];
    [session commitConfiguration];
    [microphone unlockForConfiguration];
    [menu restateConfigiration];
    [vid_off play];
}else{ //Start recording
    [vid_on play];
    // Attach microphone input/output only for the duration of the recording.
    [microphone lockForConfiguration:nil];
    [session beginConfiguration];
    [session addInput:microphone_input];
    [session addOutput:audio_output];
    [session commitConfiguration];
    [microphone unlockForConfiguration];
    [menu restateConfigiration];
    [video_button setTitle:@"Stop" forState: UIControlStateNormal];
    recording = YES;
    NSError *error = nil;
    // Write into <tmp>/temp.mp4, removing any leftover file from a previous run.
    NSFileManager * file_manager = [[NSFileManager alloc] init];
    temp_url = [[NSString alloc] initWithFormat:@"%@/%@", NSTemporaryDirectory(), @"temp.mp4"];
    [file_manager removeItemAtPath: temp_url error:NULL];
    [file_manager release];
    video_writer = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:temp_url] fileType: AVFileTypeMPEG4 error: &error];
    // Tracks: H.264 video at 360x480, plus mono 44.1 kHz AAC at 64 kbit/s.
    NSDictionary *video_settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:360], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];
    writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:video_settings] retain];
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    audio_writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings: [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,[NSNumber numberWithInt: 64000], AVEncoderBitRateKey,[NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey,nil]] retain];
    // Live capture: the input must not stall waiting for data.
    audio_writer_input.expectsMediaDataInRealTime = YES;
    // NOTE(review): expectsMediaDataInRealTime is never set on writer_input
    // (video) — confirm whether that is intentional.
    av_adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: writer_input sourcePixelBufferAttributes:NULL] retain];
    [video_writer addInput:writer_input];
    [video_writer addInput: audio_writer_input];
    [video_writer startWriting];
    // The writer timeline starts at 0; sample timestamps are computed
    // relative to start_time in the capture callbacks.
    [video_writer startSessionAtSourceTime: CMTimeMake(0,1)];
    start_time = [[NSDate alloc] init];
}

Вот делегат для аудио:

@implementation AudioOutputBufferDelegate

/// Receives audio sample buffers from the AVCaptureAudioDataOutput and
/// forwards them to the owning controller's audio_writer_input while a
/// recording is in progress. Buffers arriving outside a recording, or while
/// the writer input is not ready, are dropped.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (!normal_delegate->recording) {
        return;
    }
    // Appending while the input is not ready raises an exception, so drop
    // the buffer instead of crashing the capture queue.
    if (!normal_delegate->audio_writer_input.readyForMoreMediaData) {
        return;
    }
    // FIX: the original stamped audio with a timescale of 30 (the video frame
    // rate), quantising timestamps to ~33 ms steps and desynchronising the
    // track. Stamp with the audio sample rate (44100, matching AVSampleRateKey
    // in the writer settings) for sample-accurate timing.
    CMTime pts = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:normal_delegate->start_time], 44100);
    CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer, pts);
    // Surface append failures instead of silently ignoring the return value.
    if (![normal_delegate->audio_writer_input appendSampleBuffer:sampleBuffer]) {
        NSLog(@"AudioOutputBufferDelegate: failed to append audio sample buffer");
    }
}

@end

Метод обработки видео здесь не приводится, потому что он работает. Метод «restateConfigiration» просто восстанавливает конфигурацию сеанса — иначе, например, выключается фонарик и т. д.:

// Body of restateConfigiration: reapplies the session preset and the camera's
// exposure/focus/white-balance/torch state after a session reconfiguration.
[session beginConfiguration];
    // Map the app-level quality setting onto a session preset.
    switch (quality) {
        case Low:
            session.sessionPreset = AVCaptureSessionPresetLow;
            break;
        case Medium:
            session.sessionPreset = AVCaptureSessionPreset640x480;
            break;
    }
    [session commitConfiguration];
    // NOTE(review): lockForConfiguration:nil ignores lock failures.
    [camera lockForConfiguration:nil];
    if([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){
        camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    if([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]){
        camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    if([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]){
        camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
    }
    // Restore the torch to the user-selected state.
    if ([camera hasTorch]) {
        if (torch) {
            if([camera isTorchModeSupported:AVCaptureTorchModeOn]){
                [camera setTorchMode:AVCaptureTorchModeOn];
            }
        }else{
            if([camera isTorchModeSupported:AVCaptureTorchModeOff]){
                [camera setTorchMode:AVCaptureTorchModeOff];
            }
        }
    }
    [camera unlockForConfiguration];

Спасибо за любую помощь.

1 Ответ

8 голосов
/ 05 марта 2011

AVAssetWriter и аудио

Это может быть та же проблема, что упоминается в связанном посте. Попробуйте закомментировать эти строки

// Lines quoted from the question that the answer suggests commenting out:
[writer_input markAsFinished];
[audio_writer_input markAsFinished];
[video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];

Редактировать

Я не знаю, правильно ли вы устанавливаете метку времени презентации. Я справляюсь с этим с помощью локальной переменной, которая при запуске устанавливается в 0. Затем, когда мой делегат получает первый пакет, я делаю:

// Latch the PTS of the very first buffer as the session start time
// (_startTime is initialised to zero before capture begins).
if (_startTime.value == 0) {
    _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
}

, а затем

// Start the writer session at the first buffer's own timestamp instead of 0.
[bufferWriter->writer startWriting];
[bufferWriter->writer startSessionAtSourceTime:_startTime];

Ваш код выглядит корректным: вы вычисляете разницу во времени для каждого полученного пакета. Тем не менее, AVFoundation делает это за вас и к тому же оптимизирует временные метки для размещения в чередующемся (interleaved) контейнере. Ещё один момент, в котором я не уверен: каждый CMSampleBufferRef для аудио содержит больше одного буфера данных, и у каждого буфера данных своя метка PTS. Я не уверен, что установка PTS автоматически корректирует все остальные буферы данных.

Вот чем мой код отличается от вашего: я использую одну очередь отправки и для аудио, и для видео. В обратном вызове я делаю следующее (часть кода удалена):

// Dispatch on the writer's current status. Audio and video share this one
// callback; captureOutput identifies which output delivered the buffer.
switch (bufferWriter->writer.status) {
    case AVAssetWriterStatusUnknown:

        // First buffer ever: remember its PTS as the session start time.
        if (_startTime.value == 0) {
            _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        }

        [bufferWriter->writer startWriting];
        [bufferWriter->writer startSessionAtSourceTime:_startTime];

        //Break if not ready, otherwise fall through.
        if (bufferWriter->writer.status != AVAssetWriterStatusWriting) {
            break ;
        }

    // Intentional fall-through from AVAssetWriterStatusUnknown when the
    // writer transitioned to Writing above.
    case AVAssetWriterStatusWriting:
        if( captureOutput == self.captureManager.audioOutput) {
                // Skip audio buffers the input cannot accept yet.
                if( !bufferWriter->audioIn.readyForMoreMediaData) { 
                    break;
                }

                @try {
                    if( ![bufferWriter->audioIn appendSampleBuffer:sampleBuffer] ) {
                        [self delegateMessage:@"Audio Writing Error" withType:ERROR];
                    }
                }
                @catch (NSException *e) {
                    NSLog(@"Audio Exception: %@", [e reason]);
                }
        }
        else if( captureOutput == self.captureManager.videoOutput ) {

            if( !bufferWriter->videoIn.readyForMoreMediaData) { 
                // NOTE(review): stray double semicolon below is harmless.
                break;; 
            }

            @try {
                if (!frontCamera) {
                    // Back camera: append the sample buffer as-is.
                    if( ![bufferWriter->videoIn appendSampleBuffer:sampleBuffer] ) {
                        [self delegateMessage:@"Video Writing Error" withType:ERROR];
                    }
                }
                else {
                    // Front camera: mirror the frame into pixelBuffer and
                    // append it via the adaptor, preserving the original PTS.
                    CMTime pt = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

                    flipBuffer(sampleBuffer, pixelBuffer);

                    if( ![bufferWriter->adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pt] ) {
                        [self delegateMessage:@"Video Writing Error" withType:ERROR];
                    }
                }

            }
            @catch (NSException *e) {
                NSLog(@"Video Exception Exception: %@", [e reason]);
            }
        }

        break;
    case AVAssetWriterStatusCompleted:
        return;
    case AVAssetWriterStatusFailed: 
        // Fatal writer error: flag it and tear down the whole capture.
        [self delegateMessage:@"Critical Error Writing Queues" withType:ERROR];
        bufferWriter->writer_failed = YES ;
        _broadcastError = YES;
        [self stopCapture] ;
        return;
    case AVAssetWriterStatusCancelled:
        break;
    default:
        break;
}
...