/* First declaration of the paths used here: the intermediate video path,
   the audio path, and the final (video + audio) output path. */
// Kicks off movie creation from the currently selected images.
// Builds the intermediate video path and the final (video + audio) output
// path inside the app's Documents directory, then hands off to the writer.
- (IBAction)makeMovieButtonTouchUpInside:(id)sender {
    // Guard: nothing to do without at least one selected image.
    if (selectedImageArray.count == 0) {
        [[[UIAlertView alloc] initWithTitle:@"please select at least one image"
                                    message:@""
                                   delegate:nil
                          cancelButtonTitle:@"OK"
                          otherButtonTitles:nil, nil] show];
        return;
    }

    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    // Bundled 30-second audio track mixed under the slideshow.
    NSString *audioFilePath = [[NSBundle mainBundle] pathForResource:@"30secs" ofType:@"mp3"];
    // FIX: the path is a constant; the needless +stringWithFormat: wrapper was removed.
    NSString *videoPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    NSString *finalVideoFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];

    // The first image defines the output video dimensions (guarded above).
    UIImage *img = [selectedImageArray firstObject];
    NSLog(@"h:%@,w:%@", @(img.size.height), @(img.size.width));

    [self writeImageAndAudioAsMovie:img
                       andVideoPath:videoPath
                           andAudio:audioFilePath
                  andFinalVideoPath:finalVideoFilePath
                           duration:30];
}
/// Writes every image in selectedImageArray into an H.264 movie at
/// `videoPath`, then merges `audioFilePath` into it to produce
/// `finalVideoPath`. `duration` is the requested overall length in seconds.
/// NOTE(review): the declaration line of this method was destroyed by a
/// broken comment delimiter in the original source; it has been
/// reconstructed from the call site in -makeMovieButtonTouchUpInside:.
- (void)writeImageAndAudioAsMovie:(UIImage *)image
                     andVideoPath:(NSString *)videoPath
                         andAudio:(NSString *)audioFilePath
                andFinalVideoPath:(NSString *)finalVideoPath
                         duration:(int)duration {
    NSLog(@"start make movie: length:%d", duration);

    // FIX: remove any stale file BEFORE creating the writer; AVAssetWriter
    // fails to start when the target file already exists. The original
    // created the writer first, then deleted the file.
    if ([[NSFileManager defaultManager] fileExistsAtPath:videoPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:videoPath error:nil];
    }

    NSError *error = nil;
    AVAssetWriter *videoWriter =
        [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoPath]
                                  fileType:AVFileTypeQuickTimeMovie
                                     error:&error];
    NSParameterAssert(videoWriter);

    // Output dimensions come from the first selected image.
    NSDictionary *videoSettings = @{
        AVVideoCodecKey  : AVVideoCodecH264,
        AVVideoWidthKey  : @((int)image.size.width),
        AVVideoHeightKey : @((int)image.size.height),
    };
    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor
            assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                       sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    // FIX: this is an offline encode, not a live capture; declaring the input
    // real-time tells the writer it may drop data when it falls behind.
    writerInput.expectsMediaDataInRealTime = NO;
    videoWriter.shouldOptimizeForNetworkUse = YES;
    [videoWriter addInput:writerInput];

    // Start the session.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    NSUInteger fps = 30;
    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;                   // each still is shown for 6 s
    double frameDuration = fps * numberOfSecondsPerFrame; // ticks (at `fps` timescale) per still

    for (UIImage *img in selectedImageArray) {
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[img CGImage]];
        BOOL append_ok = NO;
        int j = 0;
        // Retry for up to ~3 s while the input is not ready for more data.
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                // FIX: the original logged the undefined `imageArray` and used
                // %d for an NSUInteger count.
                NSLog(@"Processing video frame (%d,%lu)", frameCount,
                      (unsigned long)[selectedImageArray count]);
                CMTime frameTime =
                    CMTimeMake((int64_t)(frameCount * frameDuration), (int32_t)fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *writeError = videoWriter.error;
                    if (writeError != nil) {
                        NSLog(@"Unresolved error %@,%@.", writeError, [writeError userInfo]);
                    }
                }
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            // FIX: the original format string was malformed
            // ("...times %d\n, with error." with no matching argument).
            printf("error appending image %d, gave up after %d attempts\n", frameCount, j);
        }
        // FIX: -pixelBufferFromCGImage: returns a +1 (Create-rule) buffer;
        // the original never released it, leaking one buffer per image.
        CVPixelBufferRelease(buffer);
        frameCount++;
    }

    // Finish the session with a user-defined end time.
    [videoWriter endSessionAtSourceTime:CMTimeMake(60 * 8, 1)];
    [writerInput markAsFinished];

    NSURL *refURL = [[NSURL alloc] initFileURLWithPath:videoPath];
    [videoArray addObject:refURL];

    // -finishWriting (pre-iOS 6) is synchronous; its block-based replacement
    // is asynchronous, so the audio merge must wait for the handler.
    float version = [[[UIDevice currentDevice] systemVersion] floatValue];
    if (version < 6.0) {
        [videoWriter finishWriting];
        NSLog(@"finished writing iOS version:%f", version);
        [self addAudioToFileAtPath:audioFilePath
                       toVideoPath:videoPath
                 andFinalVideoPath:finalVideoPath];
    } else {
        [videoWriter finishWritingWithCompletionHandler:^{
            NSLog(@"finished writing iOS version:%f", version);
            // FIX: the original started the merge immediately after scheduling
            // the completion handler, racing the file still being written.
            [self addAudioToFileAtPath:audioFilePath
                           toVideoPath:videoPath
                     andFinalVideoPath:finalVideoPath];
        }];
    }
    // NOTE(review): the original called CVPixelBufferPoolRelease on
    // adaptor.pixelBufferPool here — an over-release of a pool the adaptor
    // owns (and NULL anyway with nil sourcePixelBufferAttributes); removed.
}
/* Utility: build a CVPixelBuffer from a CGImage. */
// Creates a 32ARGB pixel buffer and renders `image` into it.
// Returns a +1 (Create-rule) CVPixelBufferRef — the CALLER must release it
// with CVPixelBufferRelease — or NULL if the buffer could not be created.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    NSDictionary *options = @{
        (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
        (__bridge NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    };
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    // FIX: the create result was ignored; a failure would have crashed in
    // CVPixelBufferLockBaseAddress below.
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        NSLog(@"CVPixelBufferCreate failed: %d", status);
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // FIX: use the buffer's actual row stride; CoreVideo may pad rows, so the
    // original hard-coded 4*width corrupted output for some widths. The
    // original's 0-radian rotation transform (a no-op) was removed.
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
/* Add the audio file to the video. */
// Merges the audio at `audiofilePath` with the video at `videoFilePath`
// into a single MP4 written to `finalVideoPath`. On successful export the
// result is saved to the Photos album via the video:didFinishSavingWithError:
// contextInfo: callback. The export itself is asynchronous.
- (void)addAudioToFileAtPath:(NSString *)audiofilePath
                 toVideoPath:(NSString *)videoFilePath
           andFinalVideoPath:(NSString *)finalVideoPath {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audiofilePath];
    // The video file that was just written by the asset writer.
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoFilePath];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:finalVideoPath];

    // A leftover output file would make the export session fail.
    if ([[NSFileManager defaultManager] fileExistsAtPath:finalVideoPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:finalVideoPath error:nil];
    }

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    // FIX: guard against an asset with no video track; the original's
    // -objectAtIndex:0 would throw on an empty track array.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil) {
        NSLog(@"No video track found at %@", videoFilePath);
        return;
    }
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *insertError = nil;
    // FIX: surface insert failures instead of passing error:nil.
    if (![a_compositionVideoTrack insertTimeRange:video_timeRange
                                          ofTrack:videoTrack
                                           atTime:nextClipStartTime
                                            error:&insertError]) {
        NSLog(@"Failed to insert video track: %@", insertError);
        return;
    }

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack != nil) {
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![b_compositionAudioTrack insertTimeRange:audio_timeRange
                                              ofTrack:audioTrack
                                               atTime:nextClipStartTime
                                                error:&insertError]) {
            NSLog(@"Failed to insert audio track: %@", insertError);
        }
    }

    AVAssetExportSession *_assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    // FIX: use the framework constant instead of the raw UTI string
    // @"public.mpeg-4".
    _assetExport.outputFileType = AVFileTypeMPEG4;
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export Complete");
                // FIX: save to the Photos album only on success; the original
                // saved unconditionally, even after a failed export.
                UISaveVideoAtPathToSavedPhotosAlbum(finalVideoPath, self,
                    @selector(video:didFinishSavingWithError:contextInfo:), nil);
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                // FIX: the cancelled case was mislabelled "Export Failed".
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];

    // The final merged file will be written (asynchronously) here:
    NSLog(@"DONE.....outputFilePath--->%@", finalVideoPath);
}
/* Show an alert after the video has been saved. */
// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum. Writes the
// video into the saved-photos album via ALAssetsLibrary, records the URL in
// videoArray, and shows a success or failure alert.
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error
  contextInfo:(void *)contextInfo {
    // FIX: the original referenced an undefined variable `url` (compile
    // error); build the file URL from the path this callback receives.
    NSURL *url = [NSURL fileURLWithPath:videoPath];
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // FIX: the block parameter was named `error`, shadowing the method
    // parameter; renamed for clarity (same value is checked either way).
    [library writeVideoAtPathToSavedPhotosAlbum:url
                                completionBlock:^(NSURL *assetURL, NSError *saveError) {
        if (saveError) {
            NSLog(@"Video could not be saved ,Error:%@", saveError);
            [[[UIAlertView alloc] initWithTitle:@"Sorry!!"
                                        message:@"Video data is Nil"
                                       delegate:nil
                              cancelButtonTitle:@"OK"
                              otherButtonTitles:nil, nil] show];
        } else {
            if (![videoArray containsObject:url]) {
                [videoArray addObject:url];
            }
            UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Done"
                                                            message:@"Movie succesfully exported."
                                                           delegate:nil
                                                  cancelButtonTitle:@"OK"
                                                  otherButtonTitles:nil, nil];
            [alert show];
        }
    }];
}