Слияние видео с помощью AVMutableComposition - PullRequest
0 голосов
/ 14 мая 2019

Я объединяю портретное и альбомное видео с помощью AVMutableComposition. Тем не менее, пейзажная часть станет настолько странной после экспорта: Изображение

Как вы можете видеть на изображении, видео в красном квадрате - правильное, но поверх него появляется еще один кадр.

Вот функция, которую я использую для объединения видео:

func mergeVideos(arrayVideos: [AVAsset], completionHandler: @escaping ExportedHandler) {

        var insertTime = CMTime.zero

        var arrayLayerInstructions: [AVMutableVideoCompositionLayerInstruction] = []

        var outputSize = CGSize(width: 0, height: 0)

        // Determine video output size
        for videoAsset in arrayVideos {

            let videoTrack = videoAsset.tracks(withMediaType: .video)[0]

            let assetInfo = orientationFromTransform(transform: videoTrack.preferredTransform)

            var videoSize = videoTrack.naturalSize

            if assetInfo.isPortrait == true {

                videoSize.width = videoTrack.naturalSize.height
                videoSize.height = videoTrack.naturalSize.width
            }

            if videoSize.height > outputSize.height {

                outputSize = videoSize
            }
        }

        if outputSize.width == 0 || outputSize.height == 0 {

            outputSize = defaultSize
        }

        // Init composition
        let mixComposition = AVMutableComposition()

        // Init video & audio composition track
        guard let videoCompositionTrack =
            mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
              let audioCompositionTrack =
            mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            else { return }

        for videoAsset in arrayVideos {

            // Get video track and audio track
            let videoTrack = videoAsset.tracks(withMediaType: .video)[0]
            let audioTrack = videoAsset.tracks(withMediaType: .audio)[0]

            let timeRange = CMTimeRangeMake(start: .zero, duration: videoAsset.duration)

            do {

                // Add video track to video composition at specific time
                try videoCompositionTrack.insertTimeRange(timeRange, of: videoTrack, at: insertTime)

            } catch {

                print("Load video track error")
            }

            do {

                try audioCompositionTrack.insertTimeRange(timeRange, of: audioTrack, at: insertTime)

            } catch {

                print("Load audio track error")
            }

            // Add instruction for video track
            let layerInstruction = videoCompositionInstructionForTrack(track: videoCompositionTrack,
                                                                       asset: videoAsset,
                                                                       standardSize: outputSize,
                                                                       atTime: insertTime)

            // Increase the insert time
            insertTime = CMTimeAdd(insertTime, videoAsset.duration)

            layerInstruction.setOpacity(0.0, at: insertTime)

            arrayLayerInstructions.append(layerInstruction)
        }

        // Main video composition instruction
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: insertTime)
        mainInstruction.layerInstructions = arrayLayerInstructions

        // Main video composition
        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [mainInstruction]
        mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        mainComposition.renderSize = outputSize

        // Export to file
        let path = NSTemporaryDirectory().appending("mergedVideo.mp4")
        let exportUrl = URL.init(fileURLWithPath: path)

        // Remove file if existed
        FileManager.default.removeItemIfExisted(at: exportUrl)

        // Init exporter
        guard let assetExport = AVAssetExportSession(asset: mixComposition,
                                                     presetName: AVAssetExportPresetHighestQuality) else { return }
        assetExport.videoComposition = mainComposition
        assetExport.outputURL = exportUrl
        assetExport.outputFileType = .mp4

        // Do export
        assetExport.exportAsynchronously(completionHandler: { [weak self] in

            switch assetExport.status {

            case .completed:
                self?.exportDidFinish(exporter: assetExport, videoURL: exportUrl, completion: completionHandler)

            case  .failed:
                completionHandler(nil, nil, assetExport.error)

                print("failed:", assetExport.error as Any)

            case .cancelled:
                completionHandler(nil, nil, assetExport.error)

                print("cancelled", assetExport.error as Any)

            default:
                print("complete")
            }
        })
    }

А вот вспомогательный метод, который я использую для анализа ориентации видео:

private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {

        var assetOrientation = UIImage.Orientation.up

        var isPortrait = false

        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {

            assetOrientation = .right

            isPortrait = true

        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {

            assetOrientation = .left

            isPortrait = true

        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {

            assetOrientation = .up

        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {

            assetOrientation = .down

        }

        return (assetOrientation, isPortrait)
    }

private func videoCompositionInstructionForTrack(track: AVCompositionTrack, 
                                                 asset: AVAsset, 
                                                 standardSize: CGSize, 
                                                 atTime: CMTime) -> AVMutableVideoCompositionLayerInstruction {

        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: .video)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)

        var aspectFillRatio: CGFloat = 1

        if assetInfo.isPortrait {

            aspectFillRatio = standardSize.width / assetTrack.naturalSize.height

            let scaleFactor = CGAffineTransform(scaleX: aspectFillRatio, y: aspectFillRatio)

            let posX = standardSize.width / 2 - (assetTrack.naturalSize.height * aspectFillRatio) / 2
            let posY = standardSize.height / 2 - (assetTrack.naturalSize.width * aspectFillRatio) / 2
            let moveFactor = CGAffineTransform(translationX: posX, y: posY)

            let concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(moveFactor)

            instruction.setTransform(concat, at: atTime)

        } else {

            aspectFillRatio = standardSize.width / assetTrack.naturalSize.width

            let scaleFactor = CGAffineTransform(scaleX: aspectFillRatio, y: aspectFillRatio)

            let posX = standardSize.width / 2 - (assetTrack.naturalSize.width * aspectFillRatio) / 2
            let posY = standardSize.height / 2 - (assetTrack.naturalSize.height * aspectFillRatio) / 2
            let moveFactor = CGAffineTransform(translationX: posX, y: posY)

            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(moveFactor)

            if assetInfo.orientation == .down {

                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                concat = fixUpsideDown.concatenating(scaleFactor).concatenating(moveFactor)
            }

            instruction.setTransform(concat, at: atTime)
        }

        return instruction
    }

Я обнаружил, что если я добавлю ниже код внутри цикла for-in, то эта проблема будет решена, но как только я объединю более 10 видео за раз, мое приложение будет аварийно завершено.

guard let videoCompositionTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
      let audioCompositionTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) else { return }

Я застрял в этой проблеме много дней, но до сих пор не могу понять, почему. Любая помощь будет принята с благодарностью. Большое спасибо!

...