Create a video from an array of UIImages and save the video to the iPhone library. AVAssetLibrary + AVFoundation
14 votes
/ January 19, 2011

I have a problem saving a video to the iPhone library. I have an array of UIImages and two buttons, "convertToVideo" and "saveToiPhoneLib".


-(IBAction) convertToVideo
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);

    NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;

    NSString *savedVideoPath = [documentsDirectory stringByAppendingPathComponent:@"videoOutput"];

    printf(" \n\n\n-Video file == %s--\n\n\n", [savedVideoPath UTF8String]);

    [self writeImageAsMovie:imageArray toPath:savedVideoPath size:self.view.frame.size duration:3];
}


Here I'm passing the imageArray and savedVideoPath to the function below:


-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration
{
    NSError *error = nil;

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];

    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput* writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    // NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //convert uiimage to CGImage.
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    //Write samples:
    ......

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}


Here I generate a CVPixelBufferRef:


- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
        [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
        nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, self.view.frame.size.width,
            self.view.frame.size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, 
            &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, self.view.frame.size.width,
             self.view.frame.size.height, 8, 4*self.view.frame.size.width, rgbColorSpace, 
             kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
             CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

Then I save the video to the iPhone library:


-(IBAction) saveToiPhoneLib
{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);

    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;

    NSString *getImagePath = [basePath stringByAppendingPathComponent:@"videoOutput"];

    printf(" \n\n\n-Video file == %s--\n\n\n", [getImagePath UTF8String]);

    UISaveVideoAtPathToSavedPhotosAlbum(getImagePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}


- (void) video: (NSString *) videoPath didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo {
 NSLog(@"Finished saving video with error: %@", error);
}

But while saving I get the following error:

Finished saving video with error: Error Domain=ALAssetsLibraryErrorDomain Code=-3302 "Invalid data" UserInfo=0x1d59f0 {NSLocalizedFailureReason=There was a problem writing this asset because the data is invalid and cannot be viewed or played., NSLocalizedRecoverySuggestion=Try with different data, NSLocalizedDescription=Invalid data}

Please point out my mistake. Thanks in advance.

Answers [4]

5 votes
/ February 25, 2013
-(void)convertimagetoVideo
{
    ///////////// setup OR function def if we move this to a separate function ////////////
    // this should be moved to its own function, that can take an imageArray, videoOutputPath, etc...


NSError *error = nil;


// set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists...
//NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4";
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [NSHomeDirectory()
                                stringByAppendingPathComponent:@"Documents"];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
//NSLog(@"-->videoOutputPath= %@", videoOutputPath);
// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
    NSLog(@"Unable to delete file: %@", [error localizedDescription]);


CGSize imageSize = CGSizeMake(400, 200);
NSUInteger fps = 30;

//NSMutableArray *imageArray;
//imageArray = [[NSMutableArray alloc] initWithObjects:@"download.jpeg", @"download2.jpeg", nil];
NSMutableArray *imageArray;
NSArray* imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
NSLog(@"-->imagePaths.count= %lu", (unsigned long)imagePaths.count);
for (NSString* path in imagePaths)
{
    [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
    //NSLog(@"-->image path= %@", path);
}

//////////////     end setup    ///////////////////////////////////

NSLog(@"Start building video from defined frames.");

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//convert uiimage to CGImage.
int frameCount = 0;
double numberOfSecondsPerFrame = 6;
double frameDuration = fps * numberOfSecondsPerFrame;

//for(VideoFrame * frm in imageArray)
NSLog(@"**************************************************");
for(UIImage * img in imageArray)
{
    //UIImage * img = frm._imageFrame;
    buffer = [self pixelBufferFromCGImage:[img CGImage]];

    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            //print out status:
            NSLog(@"Processing video frame (%d,%d)",frameCount,[imageArray count]);

            //CMTime frameTime = CMTimeMake((int64_t), (int32_t)2);

            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
            NSLog(@"seconds = %f, %u, %d", CMTimeGetSeconds(frameTime),fps,j);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("error appending image %d, tried %d times, with error.\n", frameCount, j);
    }
    frameCount++;
}
NSLog(@"**************************************************");

//Finish the session:
[videoWriterInput markAsFinished];
[videoWriter finishWriting];
NSLog(@"Write Ended");

}


-(void)CompileFilestomakeVideo
{

    // set up file manager, and file videoOutputPath, remove "test_output.mp4" if it exists...
    //NSString *videoOutputPath = @"/Users/someuser/Desktop/test_output.mp4";
    NSString *documentsDirectory = [NSHomeDirectory()
                                    stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    //NSLog(@"-->videoOutputPath= %@", videoOutputPath);
    // get rid of existing mp4 if exists...

    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    // audio input file...
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL    *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    // this is the video file that was just written above, full path to file is in --> videoOutputPath
    NSURL    *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL    *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];



    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = @"com.apple.quicktime-movie";
    //_assetExport.outputFileType = @"public.mpeg-4";
    //NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void ) {
         [self saveVideoToAlbum:outputFilePath];
     }
     ];

    ///// THAT IS IT DONE... the final video file will be written here...
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);

    // the final video file will be located somewhere like here:
    // /Users/caferrara/Library/Application Support/iPhone Simulator/6.0/Applications/D4B12FEE-E09C-4B12-B772-7F1BD6011BE1/Documents/outputFile.mov


    ////////////////////////////////////////////////////////////////////////////
    ////////////////////////////////////////////////////////////////////////////
}
- (void) saveVideoToAlbum:(NSString*)path {

    NSLog(@"saveVideoToAlbum");

    if(UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)){
        UISaveVideoAtPathToSavedPhotosAlbum (path, self, @selector(video:didFinishSavingWithError: contextInfo:), nil);
    }
}

-(void) video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if(error)
        NSLog(@"error: %@", error);
    else
        NSLog(@" OK");
}



////////////////////////
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {

    CGSize size = CGSizeMake(400, 200);

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess){
        NSLog(@"Failed to create pixel buffer");
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8, 4*size.width, rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    //kCGImageAlphaNoneSkipFirst);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
1 vote
/ July 30, 2013

Use the code below:

- (void)creatingVideo {

    //get full path of video file from documents directory
    NSError *error = nil;
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [self applicationDocumentsDirectory];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mov"];

    // get rid of existing mp4 if exists...
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file (it may not exist at that path)");

    //size of the video frame
    CGSize imageSize = CGSizeMake(640,480);
    //CGSize imageSize = CGSizeMake(1280, 720);

    //frame per second
    NSUInteger fps = 30;

    NSLog(@"Start building video from defined frames.");


    //AvAsset library to create video of images
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,nil];

    AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
    NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    //frameCount.
    int frameCount = 0;
    double frameDuration;
    double numberOfSecondsPerFrame = appDelegate.delaySecond;

    NSLog(@"**************************video creation started********************************");
    for (int i = 0; i<[self.arrImageDataDict count]; i++) {
        {
            @autoreleasepool{
                UIImage *img1 = nil;

                img1 = [self getImageForVideoCreation:i];

                buffer = [self pixelBufferFromCGImage: [img1 CGImage]];
                if (buffer == NULL) {
                    NSLog(@"Pixel buffer not created");
                } else {
                    BOOL append_ok = NO;
                    int j = 0;
                    while (!append_ok && j < 20) {
                        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
                            //print out status:
                            NSLog(@"Processing video frame (%d,%d) delay %f",frameCount,[self.arrImageDataDict count],numberOfSecondsPerFrame);
                            frameDuration = fps * numberOfSecondsPerFrame;
                            CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
                            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                            if(!append_ok){
                                NSError *error = videoWriter.error;
                                if(error!=nil) {
                                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                                }
                            }
                        } else {
                            printf("adaptor not ready %d, %d\n", frameCount, j);
                            [NSThread sleepForTimeInterval:0.1];
                        }
                        j++;
                    }
                    if (!append_ok) {
                        printf("error appending image %d, tried %d times, with error.\n", frameCount, j);
                    }
                    frameCount++;
                    CVPixelBufferRelease(buffer);
                    buffer = nil;
                }
            }
        }
    }

    //Finish the session:
    [videoWriterInput markAsFinished];

    //get the iOS version of the device
    float version = [[[UIDevice currentDevice] systemVersion] floatValue];
    if (version < 6.0)
    {
        [videoWriter finishWriting];
        //NSLog (@"finished writing iOS version:%f",version);

    } else {
        [videoWriter finishWritingWithCompletionHandler:^(){
            //NSLog (@"finished writing iOS version:%f",version);
        }];
    }

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [videoWriterInput release];

    //OK now add an audio file to move file
    AVMutableComposition* mixComposition = [AVMutableComposition composition];

    //Get the saved audio song path to merge it in video
    NSURL *audio_inputFileUrl ;
    NSString *filePath = [self applicationDocumentsDirectory];
    NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"mySong.m4a"];
    audio_inputFileUrl = [[NSURL alloc]initFileURLWithPath:outputFilePath1];

    // this is the video file that was just written above
    NSURL    *video_inputFileUrl = [[NSURL alloc] initFileURLWithPath:videoOutputPath];

    [NSThread sleepForTimeInterval:2.0];

    // create the final video output file as MOV file - may need to be MP4, but this works so far...
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"Slideshow_video.mov"];
    NSURL    *outputFileUrl = [[NSURL alloc]initFileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    //AVURLAsset get video without audio
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    [videoAsset release];

    [NSThread sleepForTimeInterval:3.0];

    //If audio song merged
    if (![self.appDelegate.musicFilePath isEqualToString:@"Not set"])
    {
        //*************************make sure all exception is off***********************
        AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
        CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

        if ([audioAsset tracksWithMediaType:AVMediaTypeAudio].count != 0) {
            [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
        [audioAsset release];
    }

    // Cleanup, in both success and fail cases
    [audio_inputFileUrl release];
    [video_inputFileUrl release];

    [NSThread sleepForTimeInterval:0.1];

    //AVAssetExportSession to export the video 
    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void){
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
#if !TARGET_IPHONE_SIMULATOR
                [self writeVideoToPhotoLibrary:outputFileUrl];
#endif
                [self RemoveSlideshowImagesInTemp];
                [self removeAudioFileFromDocumentsdirectory:outputFilePath1];
                [self removeAudioFileFromDocumentsdirectory:videoOutputPath];
                [outputFileUrl release];
                [_assetExport release];
                //NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (alrtCreatingVideo && alrtCreatingVideo.visible) {
                        [alrtCreatingVideo dismissWithClickedButtonIndex:alrtCreatingVideo.firstOtherButtonIndex animated:YES];
                        [databaseObj isVideoCreated:appDelegate.pro_id];
                        [self performSelector:@selector(successAlertView) withObject:nil afterDelay:0.0];
                    }
                });
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed:%@",_assetExport.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled:%@",_assetExport.error);
                break;
            default:
                break;
        }
    }];
}

//writeVideoToPhotoLibrary
- (void)writeVideoToPhotoLibrary:(NSURL *)url
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

    [library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error){
        if (error) {
            NSLog(@"Video could not be saved");
        }
    }];
    [library release];
}
0 votes
/ January 30, 2011

Yes, I had the same error:

Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo=0x193ce0 {NSLocalizedRecoverySuggestion=Try saving again., NSUnderlyingError=0x179e40 "The operation couldn’t be completed. (OSStatus error -12412.)", NSLocalizedDescription=Cannot Save}

But only on the simulator; when I ran on an actual device, saving to the photo library worked just fine.
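
A pragmatic workaround, similar to the #if !TARGET_IPHONE_SIMULATOR guard in the longer answer above, is to skip the photo-library save when running in the simulator and to verify the written file before handing it over. A minimal sketch, assuming your video was written to videoOutputPath (the method name and the file-size check are my own hypothetical additions, not from the original answers):

// Hypothetical helper: only attempt the album save on a real device,
// and confirm the movie file exists and is non-empty first.
- (void)writeVideoToPhotoLibraryIfPossible:(NSString *)videoOutputPath
{
#if TARGET_IPHONE_SIMULATOR
    // As noted in this answer, the save only failed in the simulator.
    NSLog(@"Running in the simulator - skipping the photo library save");
#else
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:videoOutputPath error:nil];
    if (attrs == nil || [attrs fileSize] == 0) {
        NSLog(@"Video file is missing or empty at %@", videoOutputPath);
        return;
    }
    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(videoOutputPath)) {
        UISaveVideoAtPathToSavedPhotosAlbum(videoOutputPath, self,
                                            @selector(video:didFinishSavingWithError:contextInfo:), nil);
    } else {
        NSLog(@"Video at %@ is not compatible with the Saved Photos album", videoOutputPath);
    }
#endif
}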

0 votes
/ January 19, 2011

That is simply too much code to check for errors. Make sure you can start the writing session, that you actually get pixel buffers for your images, that the writer is ready to accept more data, that each buffer is appended without errors, that the session finishes successfully, and that the output movie file exists and actually contains some data. Only then should you try saving it to the system photo album. Check all the available error information along the way so you know where it first goes wrong. (The other issue is that you are just taking code from the internet and pasting it together, which simply will not work for AV programming.)
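
To make that checklist concrete, here is a minimal sketch of the kind of per-step checks meant here; the helper name and the exact log messages are hypothetical, not taken from any of the answers above:

// Illustrative only: wrap each append in explicit status/error checks so the
// first failure is logged instead of silently producing an unplayable file.
- (BOOL)checkedAppendPixelBuffer:(CVPixelBufferRef)buffer
                       toAdaptor:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
                          writer:(AVAssetWriter *)videoWriter
                          atTime:(CMTime)presentationTime
{
    if (buffer == NULL) {
        NSLog(@"No pixel buffer was produced for this image");
        return NO;
    }
    if (videoWriter.status != AVAssetWriterStatusWriting) {
        NSLog(@"Writer is not in the writing state (status %d): %@",
              (int)videoWriter.status, videoWriter.error);
        return NO;
    }
    if (!adaptor.assetWriterInput.readyForMoreMediaData) {
        NSLog(@"Input is not ready for more media data yet");
        return NO;
    }
    if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentationTime]) {
        NSLog(@"appendPixelBuffer failed: %@", videoWriter.error);
        return NO;
    }
    return YES;
}

Apply the same idea at the other stages too: after startWriting, after finishing the file, and before handing it to the photo album, check the writer's status and error and confirm the output file exists with a non-zero size.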
