Implementing video-in-video with a lens blur effect - PullRequest
0 votes
/ 03 July 2018

I need to build a video editor that applies a blur effect to the video. Demo image that I am trying to follow. Can anyone please suggest useful links or explain how to accomplish this task? I tried overlaying the videos, but the foreground does not end up exactly in the center.

- (void)overlapVideos {
    // Load the same clip twice: one copy for the background, one for the centered foreground.
    AVURLAsset *firstAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];
    AVURLAsset *secondAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];

    AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];

    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    AVMutableCompositionTrack *secondTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration) ofTrack:[[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    AVMutableVideoCompositionInstruction * instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
    // Foreground layer: scaled to 60% and nudged with a hand-picked offset (this is the part that is not landing in the center).
    CGAffineTransform Scale = CGAffineTransformMakeScale(0.6f, 0.6f);
    CGAffineTransform Move = CGAffineTransformMakeTranslation(140, 20);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(Scale, Move) atTime:kCMTimeZero];
    // Background layer: scaled to 90%, no translation.
    AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];
    CGAffineTransform SecondScale = CGAffineTransformMakeScale(0.9f, 0.9f);
    CGAffineTransform SecondMove = CGAffineTransformMakeTranslation(0, 0);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondScale, SecondMove) atTime:kCMTimeZero];


    instruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, SecondlayerInstruction, nil];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = CGSizeMake(1280, 720);

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"overlapVideo.mov"];

    if([[NSFileManager defaultManager] fileExistsAtPath:myPathDocs])
    {
        [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil];
    }

    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL=url;
    [exporter setVideoComposition:videoComposition];
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [exporter exportAsynchronouslyWithCompletionHandler:^
    {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:exporter];
        });
    }];
}

- (void)exportDidFinish:(AVAssetExportSession*)session
{
    NSURL *outputURL = session.outputURL;
    if (self.videodelegateObj != nil) {
        [_videodelegateObj videoOverlappingFinished:outputURL];
    }
}

-(void)applyBlurOnAsset:(AVAsset *)asset Completion:(void(^)(BOOL success, NSError* error, NSURL* videoUrl))completion{
    CIFilter *filter = [CIFilter filterWithName:@"CIGaussianBlur"];
    AVVideoComposition *composition = [AVVideoComposition videoCompositionWithAsset: asset
                                                       applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request){
                                                           // Clamp to avoid blurring transparent pixels at the image edges
                                                           CIImage *source = [request.sourceImage imageByClampingToExtent];
                                                           [filter setValue:source forKey:kCIInputImageKey];

                                                           [filter setValue:[NSNumber numberWithDouble:10.0] forKey:kCIInputRadiusKey];

                                                           CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];

                                                           [request finishWithImage:output context:nil];
                                                       }];


    // The export destination must be a file URL.
    NSURL *outputUrl = [NSURL fileURLWithPath:@"Your Output path"];

    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPreset960x540];
    exporter.videoComposition = composition;
    exporter.outputFileType = AVFileTypeMPEG4;

    if (outputUrl){

        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{

            switch ([exporter status]) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                    if (completion){
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO,[exporter error],nil);
                        });
                        return;
                    }
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"crop Export canceled");
                    if (completion){
                        dispatch_async(dispatch_get_main_queue(), ^{
                            completion(NO,nil,nil);
                        });
                        return;
                    }
                    break;
                default:
                    break;
            }

            if (completion){
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES,nil,outputUrl);
                });
            }

        }];
    }
}
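
For the off-center foreground in -overlapVideos, I assume the translation should be derived from the render size and the track's naturalSize instead of the hard-coded (140, 20). A minimal sketch of that idea (the helper name is mine, and it ignores the track's preferredTransform / rotation):

- (CGAffineTransform)centeredTransformForTrack:(AVAssetTrack *)track scale:(CGFloat)scale renderSize:(CGSize)renderSize
{
    // Scale the track first, then translate so the scaled frame sits in the middle of the render area.
    CGSize naturalSize = track.naturalSize;
    CGFloat tx = (renderSize.width  - naturalSize.width  * scale) / 2.0f;
    CGFloat ty = (renderSize.height - naturalSize.height * scale) / 2.0f;
    return CGAffineTransformConcat(CGAffineTransformMakeScale(scale, scale),
                                   CGAffineTransformMakeTranslation(tx, ty));
}

so the foreground instruction would become something like:

    [FirstlayerInstruction setTransform:[self centeredTransformForTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]
                                                                  scale:0.6f
                                                             renderSize:CGSizeMake(1280, 720)]
                                 atTime:kCMTimeZero];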

Progress made so far by me
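
The overall flow I have in mind is: export a blurred copy of the clip first, then compose the sharp, centered copy on top of it. Just a rough sketch; -overlapBackgroundAsset:foregroundAsset: is a hypothetical variant of -overlapVideos that would take the two assets as parameters instead of loading them from the bundle:

    AVURLAsset *original = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"BearVideo" ofType:@"mp4"]] options:nil];

    [self applyBlurOnAsset:original Completion:^(BOOL success, NSError *error, NSURL *blurredURL) {
        if (!success) {
            NSLog(@"blur export failed: %@", error);
            return;
        }
        // Hypothetical: use the blurred export as the full-frame background track
        // and the original, scaled and centered, as the foreground track.
        [self overlapBackgroundAsset:[AVURLAsset URLAssetWithURL:blurredURL options:nil]
                     foregroundAsset:original];
    }];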

Please give some guidance. Any help or pointers in this direction would be greatly appreciated. Thanks in advance.

...