iOS: как обрезать видео, взяв только определенный размер в центре видео? - PullRequest
0 голосов
/ 21 декабря 2018

У меня есть видео, и я хотел бы получить определенный размер в центре видео.Например, мое видео размером 1920x960 выглядит следующим образом: 1920x960 И я хотел бы взять 1420x560 из центра видео. Вот так: 1420x560

Я перепробовал много вещей, но невозможно найти хороший способ.Когда я касаюсь значений videoComposition.renderSize, я пытался взять квадрат (чтобы быть более простым для примера) в середине видео, но он не в центре ...

    - (CGFloat)getComplimentSize:(CGFloat)size {
        CGRect screenRect = [[UIScreen mainScreen] bounds];
        CGFloat ratio = screenRect.size.height / screenRect.size.width;

        // we have to adjust the ratio for 16:9 screens
        if (ratio == 1.775) ratio = 1.77777777777778;

        return size * ratio;
    }

    - (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
        UIInterfaceOrientation orientation = UIInterfaceOrientationPortrait;
        NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];

        if([tracks count] > 0) {
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            CGAffineTransform t = videoTrack.preferredTransform;

            // Portrait
            if(t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
                orientation = UIInterfaceOrientationPortrait;
            }
            // PortraitUpsideDown
            if(t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
                orientation = UIInterfaceOrientationPortraitUpsideDown;
            }
            // LandscapeRight
            if(t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
                orientation = UIInterfaceOrientationLandscapeRight;
            }
            // LandscapeLeft
            if(t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
                orientation = UIInterfaceOrientationLandscapeLeft;
            }
        }
        return orientation;
    }

     // output file
        NSString* docFolder = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
        NSString* outputPath = [docFolder stringByAppendingPathComponent:@"output2.mov"];
        if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
            [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];

        // input file
        AVMutableComposition *composition = [AVMutableComposition composition];
        [composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

        // input clip
        NSString *filePath = [[self documentsDirectory] stringByAppendingPathComponent:@"testVideo.MP4"];
        AVAsset* asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
        AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        // crop clip to screen ratio
        UIInterfaceOrientation orientation = [self orientationForTrack:asset];
        BOOL isPortrait = (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) ? YES: NO;
        CGFloat complimentSize = [self getComplimentSize:videoTrack.naturalSize.height];
        CGSize videoSize;

        if(isPortrait) {
            videoSize = CGSizeMake(videoTrack.naturalSize.height, complimentSize);
        } else {
            videoSize = CGSizeMake(complimentSize, videoTrack.naturalSize.height);
        }

        // make render size square
        AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.renderSize = CGSizeMake(1420,560);
        videoComposition.frameDuration = CMTimeMake(1, 30);

        AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30) );

        // rotate and position video
        AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

        CGFloat tx = (videoTrack.naturalSize.width-complimentSize)/2;
        if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationLandscapeRight) {
            // invert translation
            tx *= -1;
        }

        // t1: rotate and position video since it may have been cropped to screen ratio
        CGAffineTransform t1 = CGAffineTransformTranslate(videoTrack.preferredTransform, tx, 0);
        // t2/t3: mirror video horizontally
    //    CGAffineTransform t2 = CGAffineTransformTranslate(t1, isPortrait?0:videoTrack.naturalSize.width, isPortrait?videoTrack.naturalSize.height:0);
    //    CGAffineTransform t3 = CGAffineTransformScale(t2, isPortrait?1:-1, isPortrait?-1:1);

        [transformer setTransform:t1 atTime:kCMTimeZero];
        instruction.layerInstructions = [NSArray arrayWithObject: transformer];
        videoComposition.instructions = [NSArray arrayWithObject: instruction];

        // export
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
        exporter.videoComposition = videoComposition;
        exporter.outputURL=[NSURL fileURLWithPath:outputPath];
        exporter.outputFileType=AVFileTypeQuickTimeMovie;

        [exporter exportAsynchronouslyWithCompletionHandler:^(void){
            NSLog(@"Exporting done!");
        }];

У вас есть идеи?Большое спасибо.

...