Here I obtain the sample buffers using an asset reader and then process each frame to adjust it. But the audio is missing from the final video saved to Documents. I know there is another way to process each frame, e.g. AVVideoComposition's applyingCIFiltersWithHandler: (a sketch of that route follows the code below), but I need every sample buffer so I can render an image or filter onto it. Can you suggest a solution for this?
NSError *error;
NSString *path = [[NSBundle mainBundle] pathForResource:@"recordmovie" ofType:@"mov"];
NSURL *videoURL = [NSURL fileURLWithPath:path];
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
// add audio track here
AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
NSDictionary *readerOutputSettings =
    @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
CGSize renderSize = [videoTrack naturalSize];
/*
NSDictionary *readerOutputSettings = @{
    AVVideoCodecKey       : AVVideoCodecH264,
    AVVideoWidthKey       : @(renderSize.width),
    AVVideoHeightKey      : @(renderSize.height),
    AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill
};
*/
AVAssetReaderTrackOutput *readerOutput =
    [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                               outputSettings:readerOutputSettings];
AudioChannelLayout acl;
bzero(&acl, sizeof(acl));
acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
NSDictionary *audioOutputSettings = @{
    AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
    AVNumberOfChannelsKey : @1,
    AVSampleRateKey       : @44100.0,
    AVChannelLayoutKey    : [NSData dataWithBytes:&acl length:sizeof(acl)],
    AVEncoderBitRateKey   : @64000
};
NSDictionary *audioReaderSettings = @{ AVFormatIDKey : @(kAudioFormatLinearPCM) };
AVAssetReaderTrackOutput *audioTrackOutput =
    [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack
                                               outputSettings:audioReaderSettings];
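// The reader decompresses audio to Linear PCM; the AAC settings above are what
// the writer input uses to re-encode it on append.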
[reader addOutput:readerOutput];
[reader addOutput:audioTrackOutput];
[reader startReading];
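// If startReading returns NO, reader.status becomes AVAssetReaderStatusFailed
// and reader.error describes the failure; worth checking before looping.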
NSMutableArray *samples = [[NSMutableArray alloc] init];
CMSampleBufferRef sample;
while((sample = [readerOutput copyNextSampleBuffer])) {
[samples addObject:(__bridge id)sample];
CFRelease(sample);
}
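// Caution: this buffers every decoded video frame in memory, which can be
// prohibitive for long clips; an interleaved read/filter/write loop scales better.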
NSString *outputPath = [self getDocumentsUrlForFilterMovie];
NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                                  fileType:AVFileTypeQuickTimeMovie
                                                     error:&error];
NSDictionary *videoCompressionProps = @{ AVVideoAverageBitRateKey : @(videoTrack.estimatedDataRate) };
NSDictionary *writerOutputSettings = @{
    AVVideoCodecKey                 : AVVideoCodecH264,
    AVVideoWidthKey                 : @((int)videoTrack.naturalSize.width),
    AVVideoHeightKey                : @((int)videoTrack.naturalSize.height),
    AVVideoCompressionPropertiesKey : videoCompressionProps
};
AVAssetWriterInput *writerInput =
    [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                   outputSettings:writerOutputSettings
                                 sourceFormatHint:(__bridge CMFormatDescriptionRef)[videoTrack.formatDescriptions lastObject]];
writerInput.expectsMediaDataInRealTime = NO;
[writer addInput:writerInput];
AVAssetWriterInput *writerAudioInput =
    [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                       outputSettings:audioOutputSettings];
// This is an offline transcode, so the input should not expect real-time data.
writerAudioInput.expectsMediaDataInRealTime = NO;
if ([writer canAddInput:writerAudioInput]) {
    [writer addInput:writerAudioInput];
}
AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor =
    [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
                                               sourcePixelBufferAttributes:nil];
[writer startWriting];
[writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];
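// Note: samples with presentation times earlier than the session start time
// (the first video frame here) are trimmed from the output.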
while ((sample = [audioTrackOutput copyNextSampleBuffer])) {
    // Wait for the input to be ready *before* appending: appending while
    // readyForMoreMediaData is NO can fail, which is a likely reason the
    // audio never reaches the output file.
    while (!writerAudioInput.readyForMoreMediaData) {
        [NSThread sleepForTimeInterval:0.1];
    }
    [writerAudioInput appendSampleBuffer:sample];
    CFRelease(sample);
}
[writerAudioInput markAsFinished];
CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
[filter setDefaults];
[filter setValue:@1.0 forKey:kCIInputIntensityKey];
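// Note: Core Image renders most predictably into BGRA buffers; if the filtered
// colors look wrong, try requesting kCVPixelFormatType_32BGRA from the reader
// output instead of the bi-planar YpCbCr format above.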
for (NSInteger i = 0; i < samples.count; i++) {
    CMSampleBufferRef videoSample = (__bridge CMSampleBufferRef)samples[i];
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(videoSample);
    CVPixelBufferRef videoFrameBuffer = CMSampleBufferGetImageBuffer(videoSample);
    CIImage *frameImage = [CIImage imageWithCVPixelBuffer:videoFrameBuffer];
    [filter setValue:frameImage forKey:kCIInputImageKey];
    CIImage *outputImage = filter.outputImage;
    // Render the filtered frame back into the source pixel buffer in place.
    [self->ciContext render:outputImage
            toCVPixelBuffer:videoFrameBuffer
                     bounds:outputImage.extent
                 colorSpace:self->colorSpace];
    while (!writerInput.readyForMoreMediaData) {
        [NSThread sleepForTimeInterval:0.1];
    }
    [pixelBufferAdaptor appendPixelBuffer:videoFrameBuffer withPresentationTime:presentationTime];
}
[writerInput markAsFinished];
[writer finishWritingWithCompletionHandler:^{
    //[self.delegate didFinishReverse:YES andVideoURL:outputURL withError:error];
    NSLog(@"Finished video rendering, status: %ld, error: %@", (long)writer.status, writer.error);
}];
});
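For reference, here is a minimal sketch of the applyingCIFiltersWithHandler: route mentioned above (assuming iOS 9+). It keeps the audio track with no extra work, because AVAssetExportSession copies the non-video tracks and only routes video frames through the filter handler; the trade-off is that the handler hands you a CIImage per frame rather than the raw CMSampleBufferRef. It reuses asset and the getDocumentsUrlForFilterMovie helper from the code above.

AVVideoComposition *filterComposition =
    [AVVideoComposition videoCompositionWithAsset:asset
        applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request) {
            // Apply the same sepia filter to each frame the composition hands us.
            CIFilter *sepia = [CIFilter filterWithName:@"CISepiaTone"];
            [sepia setValue:request.sourceImage forKey:kCIInputImageKey];
            [sepia setValue:@1.0 forKey:kCIInputIntensityKey];
            [request finishWithImage:sepia.outputImage context:nil];
        }];
NSURL *exportURL = [NSURL fileURLWithPath:[self getDocumentsUrlForFilterMovie]];
AVAssetExportSession *exportSession =
    [[AVAssetExportSession alloc] initWithAsset:asset
                                     presetName:AVAssetExportPresetHighestQuality];
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
exportSession.outputURL = exportURL;
exportSession.videoComposition = filterComposition;
[exportSession exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Export status: %ld, error: %@", (long)exportSession.status, exportSession.error);
}];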