AVMutableComposition: changing renderSize does not work
0 votes / 27 October 2018

I am trying to build a video editor in which users can save a video at a given resolution. Users can choose from a variety of frames, and my program has to fit the video into a 1920x1080 or 1080x1920 output.

The problem is that the orientation and size of the saved video are wrong. I know the issue is related to the transform on the video layer instruction, but I do not know how to set it up correctly.
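What I think should happen: the layer instruction should end up with a transform that first applies the track's preferredTransform and then aspect-fits the oriented video into the fixed render size (1920x1080 or 1080x1920). A rough sketch of the idea (this is not my working code; aspectFitTransform is a made-up helper name):

#import <AVFoundation/AVFoundation.h>

// Sketch only: aspect-fit a track into a fixed render size (e.g. 1920x1080 or 1080x1920).
static CGAffineTransform aspectFitTransform(AVAssetTrack *track, CGSize targetSize) {
    CGAffineTransform preferred = track.preferredTransform;
    // Bounding box of the video after its preferredTransform (handles portrait clips).
    CGRect oriented = CGRectApplyAffineTransform(CGRectMake(0, 0, track.naturalSize.width, track.naturalSize.height), preferred);
    CGFloat scale = MIN(targetSize.width / CGRectGetWidth(oriented),
                        targetSize.height / CGRectGetHeight(oriented));

    CGAffineTransform t = preferred;
    // Shift the oriented box to the origin, scale it, then center it in the render size.
    t = CGAffineTransformConcat(t, CGAffineTransformMakeTranslation(-CGRectGetMinX(oriented),
                                                                    -CGRectGetMinY(oriented)));
    t = CGAffineTransformConcat(t, CGAffineTransformMakeScale(scale, scale));
    t = CGAffineTransformConcat(t, CGAffineTransformMakeTranslation((targetSize.width  - CGRectGetWidth(oriented)  * scale) / 2.0,
                                                                    (targetSize.height - CGRectGetHeight(oriented) * scale) / 2.0));
    return t;
}

With something like that I would expect to set renderSize to the target resolution and pass the result to setTransform:atTime: on the layer instruction. Here is my actual code: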

- (void)videoOutput:(AVAsset *)videoAsset backgroundImage:(UIImage *)backgroundImage containerSize:(CGSize)containerSize videoFrame:(CGRect)videoFrame completionBlock:(SaveCompletionBlock)completionBlock {
    self.completionBlock = completionBlock;

    // 2 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // 3 - Video track
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:kCMTimeZero error:nil];


    // Audio track
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];


    // 3.1 - Create AVMutableVideoCompositionInstruction
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

    // 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation videoAssetOrientation_  = UIImageOrientationUp;
    BOOL isVideoAssetPortrait_  = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationRight;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ =  UIImageOrientationLeft;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
        videoAssetOrientation_ =  UIImageOrientationUp;
    }
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
        videoAssetOrientation_ = UIImageOrientationDown;
    }


    CGSize naturalSize;
    if(isVideoAssetPortrait_){
        naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    } else {
        naturalSize = videoAssetTrack.naturalSize;
    }

    CGAffineTransform transform;
    if (isVideoAssetPortrait_) {
        CGFloat scale = naturalSize.height/naturalSize.width;
        transform = CGAffineTransformMakeScale(scale, 1);
        transform = CGAffineTransformConcat(videoTrack.preferredTransform, transform);
    }
    else {
        CGFloat scale = naturalSize.width/naturalSize.height;
        transform = CGAffineTransformMakeScale(1, scale);
    }
    [videolayerInstruction setTransform:transform atTime:kCMTimeZero];
    [videolayerInstruction setOpacity:0.0 atTime:videoAsset.duration];


    // 3.3 - Add instructions
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];

    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];



    CGFloat scale = naturalSize.height/videoFrame.size.height;

    mainCompositionInst.renderSize = CGSizeMake(containerSize.width*scale, containerSize.height*scale);
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);


    CGRect originalVideoFrame = CGRectMake(videoFrame.origin.x*scale, videoFrame.origin.y*scale, videoFrame.size.width*scale, videoFrame.size.height*scale);
    [self applyVideoEffectsToComposition:mainCompositionInst containerSize:mainCompositionInst.renderSize videoFrame:originalVideoFrame backgroundImage:backgroundImage];

    // 4 - Get path
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs =  [documentsDirectory stringByAppendingPathComponent:
                             [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // 5 - Create exporter
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}
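To make the sizes concrete (the numbers are just an example, not real project values): if containerSize is 375x667, videoFrame is {20, 100, 335, 188} and the clip is 1920x1080 landscape, then scale = naturalSize.height / videoFrame.size.height = 1080 / 188 ≈ 5.74, so renderSize becomes roughly 2154x3831 instead of the 1080x1920 I actually want. This is the kind of mismatch I am seeing. The layer part looks like this: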


- (void)applyVideoEffectsToComposition:(AVMutableVideoComposition *)composition containerSize:(CGSize)containerSize videoFrame:(CGRect)videoFrame backgroundImage:(UIImage *)backgroundImage {

    CALayer *parentLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, containerSize.width, containerSize.height);
    parentLayer.backgroundColor = [UIColor yellowColor].CGColor;


    CALayer *backgroundLayer = [CALayer layer];
    [backgroundLayer setContents:(id)[backgroundImage CGImage]];
    backgroundLayer.frame = parentLayer.bounds;
    [backgroundLayer setMasksToBounds:YES];
    backgroundLayer.backgroundColor = [UIColor cyanColor].CGColor;


    NSLog(@"%@", NSStringFromCGSize(containerSize));
    NSLog(@"%@", NSStringFromCGRect(videoFrame));

    CALayer *videoLayer = [CALayer layer];
    videoLayer.frame = videoFrame;
    videoLayer.backgroundColor = [UIColor redColor].CGColor;


    [parentLayer addSublayer:backgroundLayer];
    [parentLayer addSublayer:videoLayer];

    composition.animationTool = [AVVideoCompositionCoreAnimationTool
                                 videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
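One thing I am not sure about: as far as I know, the layers passed to AVVideoCompositionCoreAnimationTool are laid out in the video's coordinate space, whose origin is in the bottom-left corner rather than the top-left as in UIKit. So maybe the y origin of videoFrame also needs to be flipped inside applyVideoEffectsToComposition:, roughly like this (an assumption on my part, not verified):

    // Assumption: videoFrame comes in with UIKit's top-left origin, so flip it into the
    // bottom-left-origin space used by the export renderer before assigning it to the layer.
    CGRect flippedFrame = videoFrame;
    flippedFrame.origin.y = containerSize.height - CGRectGetMaxY(videoFrame);
    videoLayer.frame = flippedFrame;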
...