AVFoundation - merge multiple videos - add animation between videos
0 votes
June 26, 2019

I am trying to merge image and video clips together. I have provided the ability to add an animation between the videos and images. There are several options, such as fade in, fade out, rotate, slide up, slide down, slide left, slide right, etc. For images I can add the animation, but how do I add an animation for videos? Specifically, at the moment one video clip finishes and the next one starts, I want to add an animation. My merge functionality itself works well; it is only about adding the animation between videos.

I have tried:

instruction.setOpacityRamp(fromStartOpacity: <#T##Float#>, toEndOpacity: <#T##Float#>, timeRange: <#T##CMTimeRange#>)

, but this only produces a fade-in/fade-out effect. Where and how do I add the other custom animation options?
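
As far as I can tell, AVMutableVideoCompositionLayerInstruction only exposes three animatable ramps: opacity (setOpacityRamp), affine transform (setTransformRamp(fromStart:toEnd:timeRange:)) and crop rectangle (setCropRectangleRamp(fromStartCropRectangle:toEndCropRectangle:timeRange:)). So I assume a slide would have to be expressed as a transform ramp, roughly like this sketch (clipEndTime is a placeholder for wherever the clip ends in my composition, and the one-second duration is arbitrary):

// Slide the outgoing clip off-screen to the left over its final second.
let transitionDuration = CMTime(seconds: 1, preferredTimescale: 600)
let transitionRange = CMTimeRange(start: CMTimeSubtract(clipEndTime, transitionDuration),
                                  duration: transitionDuration)
// If the instruction already applies an orientation transform, that transform
// should be the start value instead of .identity.
instruction.setTransformRamp(fromStart: .identity,
                             toEnd: CGAffineTransform(translationX: -outputSize.width, y: 0),
                             timeRange: transitionRange)

Is that the right building block, and if so, how do I make the next clip visible while the ramp runs?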

Here is my source code for the merge. The code uses many helper functions, but I have posted only the merge functionality. I have marked the relevant spot with //HERE TO ADD THE ANIMATION, so you can jump straight to the point where I am trying to add the animation.

func merge(allAssets: [MovieAssetPresentable], isHDR: Bool, success: @escaping (URL?) -> (Void), progress: @escaping (CGFloat) -> (Void), failed: @escaping (String?) -> (Void)) {
    cancelExport()
    let defaultSize = isHDR ? self.videoOutputResolution.HD : self.videoOutputResolution.lowQuality
    let videoPresetName = self.getPresetName(resolution: defaultSize)

    self.mergeSuccess = success
    self.mergeError = failed
    self.mergeProgress = progress

    let mixComposition = AVMutableComposition()

    let mainInstruction = AVMutableVideoCompositionInstruction()

    var layerInstructions = [AVMutableVideoCompositionLayerInstruction]()

    guard let urlVideoForBackground = Bundle.main.url(forResource: "black", withExtension: "mov") else {
        self.mergeError("Need black background video !")
        return
    }

    let assetForBackground = AVAsset(url: urlVideoForBackground)

    let trackForBackground = assetForBackground.tracks(withMediaType: AVMediaType.video).first

    //Set output size
    var outputSize = CGSize.zero

    for asset in allAssets.filter({$0.assetType! == .video}) {
        guard let videoAsset = asset.asset else { continue }

        // Get video track
        guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { continue }

        let assetInfo = self.orientationFromTransform(videoTrack.preferredTransform)

        var videoSize = videoTrack.naturalSize

        if assetInfo.isPortrait == true {
            videoSize.width = videoTrack.naturalSize.height
            videoSize.height = videoTrack.naturalSize.width
        }

        if videoSize.height > outputSize.height {
            outputSize = CGSize(width: defaultSize.width, height: ((videoSize.height / videoSize.width) * defaultSize.width))
        }
    }

    if outputSize == CGSize.zero {
        outputSize = defaultSize
    }

    debugPrint("OUTPUT SIZE: \(outputSize)")
    let layerContentsGravity = VideoSettings.shared.fetchVideoFitClips()
    var layerImages = [CALayer]()

    var insertTime = CMTime.zero

    var audioMixInputParameters = [AVMutableAudioMixInputParameters]()


    // Init Video layer
    let videoLayer = CALayer()
    videoLayer.frame = CGRect.init(x: 0, y: 0, width: outputSize.width, height: outputSize.height)
    videoLayer.contentsGravity = layerContentsGravity

    let parentlayer = CALayer()
    parentlayer.frame = CGRect.init(x: 0, y: 0, width: outputSize.width, height: outputSize.height)
    parentlayer.addSublayer(videoLayer)


    for asset in allAssets.filter({$0.assetType! == .image || $0.assetType! == .video}) {
        //Video speed level
        let videoSpeed = Double(asset.videoSpeedLevel!)
        if asset.assetType! == .video {
            //Video asset
            let ast = asset.asset!
            let duration = asset.endTime! - asset.beginTime! //ast.duration

            //Create AVMutableCompositionTrack object
            guard let track = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                             preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
                                                                self.mergeError("Unable to create track.")
                                                                continue
            }

            //Add original video sound track
            let originalSoundTrack: AVMutableCompositionTrack?
            if asset.asset!.tracks(withMediaType: .audio).count > 0 {
                originalSoundTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
                do {
                    try originalSoundTrack?.insertTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration), of: ast.tracks(withMediaType: AVMediaType.audio)[0], at: insertTime)
                } catch {
                    self.mergeError("Unable to create original audio track.")
                    continue
                }
                //Set video original sound track speed
                originalSoundTrack?.scaleTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration), toDuration: CMTime(value: CMTimeValue(Double(duration.value) * videoSpeed), timescale: duration.timescale))

                let audioInputParams = AVMutableAudioMixInputParameters(track: originalSoundTrack)
                audioInputParams.setVolume(asset.videoOriginalVolume!, at: CMTime.zero)
                audioInputParams.trackID = originalSoundTrack?.trackID ?? kCMPersistentTrackID_Invalid
                audioMixInputParameters.append(audioInputParams)
            }

            //Set time range
            do {
                try track.insertTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration),
                                          of: ast.tracks(withMediaType: AVMediaType.video)[0],
                                          at: insertTime)
            } catch let err {
                self.mergeError("Failed to load track: \(err.localizedDescription)")
                continue
            }
            //Set video speed
            track.scaleTimeRange(CMTimeRange(start: asset.beginTime ?? CMTime.zero, duration: duration), toDuration: CMTime(value: CMTimeValue(Double(duration.value) * videoSpeed), timescale: duration.timescale))

            insertTime = CMTimeAdd(insertTime, duration)


            let instruction = self.videoCompositionInstruction(track, asset: ast, outputSize: outputSize)
            //                let instruction = videoCompositionInstructionForTrack(track: t, asset: ast, standardSize: outputSize, atTime: insertTime)
            instruction.setOpacity(0.0, at: insertTime)

            //HERE TO ADD THE ANIMATION
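
            // (Sketch only, not part of my working code: a ramp here could slide
            // the outgoing clip away over its last transitionDuration, e.g.
            // instruction.setTransformRamp(fromStart: .identity,
            //     toEnd: CGAffineTransform(translationX: -outputSize.width, y: 0),
            //     timeRange: CMTimeRange(start: CMTimeSubtract(insertTime, transitionDuration),
            //                            duration: transitionDuration))
            // transitionDuration is an assumed value, and since my clips are laid
            // out sequentially the next clip is not on screen yet; see the overlap
            // sketch after this listing.)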

            layerInstructions.append(instruction)
        } else {
            //Image data
            let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video,
                                                                       preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            let defaultImageTime = CMTimeGetSeconds(asset.endTime!) - CMTimeGetSeconds(asset.beginTime!)
            let duration = CMTime.init(seconds:defaultImageTime, preferredTimescale: assetForBackground.duration.timescale)
            do {
                try videoCompositionTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: duration),
                                                           of: trackForBackground!,
                                                           at: insertTime)
            }
            catch {
                self.mergeError("Background time range error")
            }

            guard let image = UIImage(data: asset.imageData!) else { continue }

            // Create Image layer
            let imageLayer = CALayer()
            imageLayer.frame = CGRect.init(origin: CGPoint.zero, size: outputSize)
            imageLayer.contents = image.cgImage
            imageLayer.opacity = 0
            imageLayer.contentsGravity = layerContentsGravity

            self.setOrientation(image: image, onLayer: imageLayer)

            // Add Fade in & Fade out animation
            let fadeInAnimation = CABasicAnimation.init(keyPath: "opacity")
            fadeInAnimation.duration = 1
            fadeInAnimation.fromValue = NSNumber(value: 0)
            fadeInAnimation.toValue = NSNumber(value: 1)
            fadeInAnimation.isRemovedOnCompletion = false
            fadeInAnimation.beginTime = CMTimeGetSeconds(insertTime) == 0 ? 0.05: CMTimeGetSeconds(insertTime)
            fadeInAnimation.fillMode = CAMediaTimingFillMode.forwards
            imageLayer.add(fadeInAnimation, forKey: "opacityIN")

            let fadeOutAnimation = CABasicAnimation.init(keyPath: "opacity")
            fadeOutAnimation.duration = 1
            fadeOutAnimation.fromValue = NSNumber(value: 1)
            fadeOutAnimation.toValue = NSNumber(value: 0)
            fadeOutAnimation.isRemovedOnCompletion = false
            fadeOutAnimation.beginTime = CMTimeGetSeconds(CMTimeAdd(insertTime, duration))
            fadeOutAnimation.fillMode = CAMediaTimingFillMode.forwards
            imageLayer.add(fadeOutAnimation, forKey: "opacityOUT")

            layerImages.append(imageLayer)

            // Increase the insert time
            insertTime = CMTimeAdd(insertTime, duration)
        }
    }


    // Add Image layers
    for layer in layerImages {
        parentlayer.addSublayer(layer)
    }

    //Add watermark if subscription is not active
    if !AddManager.shared.hasActiveSubscription {

        let imglogo = UIImage(named: "watermark")
        let waterMarklayer = CALayer()
        waterMarklayer.contents = imglogo?.cgImage
        let sizeOfWaterMark = Utility.getWaterMarkSizeWithVideoSize(videoSize: outputSize, defaultSize: waterMarkSize)
        debugPrint("sizeOfWaterMark=\(sizeOfWaterMark)")
        waterMarklayer.frame = CGRect(x: outputSize.width - (sizeOfWaterMark.width+10), y: 5, width: sizeOfWaterMark.width, height: sizeOfWaterMark.height)
        waterMarklayer.contentsGravity = .resizeAspect
        waterMarklayer.opacity = 1.0
        parentlayer.addSublayer(waterMarklayer)
    }

    mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: insertTime)
    mainInstruction.layerInstructions = layerInstructions
    mainInstruction.backgroundColor = VideoSettings.shared.fetchVideoBackgroundColor().color.cgColor

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    mainComposition.renderSize = outputSize
    mainComposition.renderScale = 1.0
    mainComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentlayer)

    for audioAsset in allAssets.filter({$0.assetType! == .audio}) {
        //NOTE: To lengthen or shorten the audio fade-in/fade-out effect, change fadeInFadeOutEffectTiming (in seconds)
        let fadeInFadeOutEffectTiming = Double(3) //seconds

        let volumeLevel = audioAsset.audioVolumeLevel!
        let isFadeIn = audioAsset.audioFadeInEffect!
        let isFadeOut = audioAsset.audioFadeOutEffect!
        var audioBeginTime = audioAsset.beginTime!
        var audioEndTime = audioAsset.endTime!
        var audioTrackTime = audioAsset.audioTrackStartTime!
        var trimmedAudioDuration = CMTimeSubtract(audioEndTime, audioBeginTime)

        //If the audio start position (in seconds) is greater than or equal to zero (relative to the video timeline)

        if CMTimeGetSeconds(CMTimeAdd(audioTrackTime, audioBeginTime)) >= 0 {
            //If the audio start position is beyond the video length (e.g. the video is 20 seconds long but the audio starts at 24 seconds), we should not add the audio
            if CMTimeCompare(CMTimeAdd(audioTrackTime, audioBeginTime), insertTime) == 1 {
                trimmedAudioDuration = CMTime.zero
            } else {
                //If the audio start position (seconds) plus the trimmed length exceeds the total video length, add only the part that falls within the video
                if CMTimeCompare(CMTimeAdd(CMTimeAdd(audioTrackTime, audioBeginTime), trimmedAudioDuration), insertTime) == 1 {
                    audioTrackTime = CMTimeAdd(audioTrackTime, audioBeginTime)
                    trimmedAudioDuration = CMTimeSubtract(insertTime, audioTrackTime)
                } else {
                    audioTrackTime = CMTimeAdd(audioTrackTime, audioBeginTime)
                }
            }
        }
            //If the audio start time is negative (seconds)
        else {
            //If the trimmed audio length is negative (seconds)
            if CMTimeCompare(CMTimeAdd(CMTimeAdd(audioTrackTime, audioBeginTime), trimmedAudioDuration), CMTime.zero) == -1 {
                trimmedAudioDuration = CMTime.zero
            } else {
                audioBeginTime = CMTime(seconds: abs(CMTimeGetSeconds(audioTrackTime)), preferredTimescale: audioTrackTime.timescale)
                audioTrackTime = CMTime.zero
                trimmedAudioDuration = CMTimeSubtract(audioEndTime, audioBeginTime)
                if CMTimeCompare(trimmedAudioDuration, insertTime) == 1 {
                    trimmedAudioDuration = insertTime
                }
            }
        }

        if trimmedAudioDuration != CMTime.zero {
            audioEndTime = CMTimeAdd(audioTrackTime, trimmedAudioDuration)
            let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            do {
                try audioTrack?.insertTimeRange(CMTimeRangeMake(start: audioBeginTime , duration: trimmedAudioDuration),
                                                of: audioAsset.asset!.tracks(withMediaType: AVMediaType.audio)[0] ,
                                                at: audioTrackTime)
                let audioInputParams = AVMutableAudioMixInputParameters(track: audioTrack)

                var effectTime = CMTime(seconds: fadeInFadeOutEffectTiming, preferredTimescale: 600)
                if CMTimeCompare(trimmedAudioDuration, CMTimeMultiply(effectTime, multiplier: 2)) == -1 {
                    effectTime = CMTime(seconds: CMTimeGetSeconds(trimmedAudioDuration) / 2, preferredTimescale: 600)
                }

                //Fade in effect
                audioInputParams.setVolumeRamp(fromStartVolume: isFadeIn ? 0 : volumeLevel, toEndVolume: volumeLevel, timeRange: CMTimeRange(start: audioTrackTime, duration: effectTime))
                //Fade out effect
                audioInputParams.setVolumeRamp(fromStartVolume: volumeLevel, toEndVolume: isFadeOut ? 0 : volumeLevel, timeRange: CMTimeRange(start: CMTimeSubtract(audioEndTime, effectTime), duration: effectTime))

                audioInputParams.trackID = audioTrack?.trackID ?? kCMPersistentTrackID_Invalid
                audioMixInputParameters.append(audioInputParams)
            } catch {
                print("Failed to load Audio track")
            }
        }
    }

    // 4 - Get path
    guard let url = Utility.createFileAtDocumentDirectory(name: "mergeVideo-\(Date().timeIntervalSince1970).mp4") else {
        debugPrint("Unable to file at document directory")
        return
    }

    // 5 - Create Exporter
    self.exporter = AVAssetExportSession(asset: mixComposition, presetName: videoPresetName)
    guard let exp = self.exporter else {
        debugPrint("Unable to export.")
        return
    }
    let audioMix = AVMutableAudioMix()
    audioMix.inputParameters = audioMixInputParameters
    exp.outputURL = url
    exp.outputFileType = AVFileType.mp4
    exp.shouldOptimizeForNetworkUse = true
    exp.videoComposition = mainComposition
    exp.audioMix = audioMix

    //self.viewPieProgress.setProgress(0.0, animated: false)
    //viewPieProgress.isHidden = isHDR

    //timer for progress
    self.timer = Timer.scheduledTimer(timeInterval: 1.0, target: self, selector: #selector(self.updateExportingProgress(timer:)), userInfo: exp, repeats: true)

    // 6 - Perform the Export
    exp.exportAsynchronously() {
        DispatchQueue.main.async {
            self.exportDidFinish(exp)
        }
    }
}
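
For reference, here is a minimal, self-contained sketch of the overlapping-tracks approach I believe a real transition needs. This is not my working merge code above; assetA, assetB, the one-second transitionDuration and the "push left" direction are my own assumptions, and audio handling is omitted:

import AVFoundation

// Sketch: a "push left" transition between two clips, built by overlapping two
// tracks and ramping both transforms across the overlap.
// Assumes both clips are longer than transitionDuration.
func makeTransitionComposition(assetA: AVAsset, assetB: AVAsset,
                               renderSize: CGSize) throws -> (AVMutableComposition, AVMutableVideoComposition) {
    let composition = AVMutableComposition()
    let transitionDuration = CMTime(seconds: 1, preferredTimescale: 600)

    guard let trackA = composition.addMutableTrack(withMediaType: .video,
                                                   preferredTrackID: kCMPersistentTrackID_Invalid),
          let trackB = composition.addMutableTrack(withMediaType: .video,
                                                   preferredTrackID: kCMPersistentTrackID_Invalid) else {
        fatalError("Unable to add composition tracks") // sketch: handle properly in real code
    }

    // Clip A starts at zero; clip B starts transitionDuration before A ends,
    // so the two tracks overlap for exactly transitionDuration.
    try trackA.insertTimeRange(CMTimeRange(start: .zero, duration: assetA.duration),
                               of: assetA.tracks(withMediaType: .video)[0], at: .zero)
    let overlapStart = CMTimeSubtract(assetA.duration, transitionDuration)
    try trackB.insertTimeRange(CMTimeRange(start: .zero, duration: assetB.duration),
                               of: assetB.tracks(withMediaType: .video)[0], at: overlapStart)
    let overlap = CMTimeRange(start: overlapStart, duration: transitionDuration)

    // One instruction covering the whole timeline; outside the overlap only one
    // track has media, so only the overlap actually composites two layers.
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: .zero,
                                        duration: CMTimeAdd(overlapStart, assetB.duration))

    // Outgoing clip slides off to the left across the overlap...
    let layerA = AVMutableVideoCompositionLayerInstruction(assetTrack: trackA)
    layerA.setTransformRamp(fromStart: .identity,
                            toEnd: CGAffineTransform(translationX: -renderSize.width, y: 0),
                            timeRange: overlap)

    // ...while the incoming clip slides in from the right.
    let layerB = AVMutableVideoCompositionLayerInstruction(assetTrack: trackB)
    layerB.setTransformRamp(fromStart: CGAffineTransform(translationX: renderSize.width, y: 0),
                            toEnd: .identity,
                            timeRange: overlap)

    instruction.layerInstructions = [layerA, layerB] // layerA composites on top

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    videoComposition.renderSize = renderSize
    videoComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    return (composition, videoComposition)
}

If this is the right direction, I assume my merge loop would need to keep two alternating video tracks and shift each clip's start back by transitionDuration, expressing a fade with setOpacityRamp and a slide with setTransformRamp over each overlap. Is that correct, or is there a simpler way?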

I have tried many options, but nothing meets my requirements. Please help me.

If you need any other information from me, please feel free to comment on this post.

Thanks in advance.
