Первое видео появится после объединения разных видео - PullRequest
0 голосов
/ 21 апреля 2020

Я создаю приложение для видео-коллажа, в котором я объединяю несколько видео в разных кадрах и делаю одно видео. Но все кадры показывают первое видео. Другие видео не отображаются после слияния. Пожалуйста, дайте мне предложение как можно скорее. Мой код ниже.

 func newoverlay() {

        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()
        var arrayOfComposition = Array<AVMutableCompositionTrack>()
        var trackInstruction = Array<AVVideoCompositionLayerInstruction>()
        var videolayer = Array<CALayer>()
        var i:Int = 0
        let mainInstruction = AVMutableVideoCompositionInstruction()
        var assetDuration:CMTime = CMTime.zero
        var box = Array<CALayer>()
        var arrOfIns = Array<AVMutableVideoCompositionInstruction>()
        var atTimeM : CMTime = CMTimeMake(value: 0, timescale: 0)
        var lastAsset: AVURLAsset!
        // 2 - Create two video tracks
        for videoAssetss in firstAsset {

            guard var firstTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                                  preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
            do {
                try firstTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: (videoAssetss as? AVURLAsset)!.duration),
                                               of: (videoAssetss as? AVURLAsset)!.tracks(withMediaType: .video)[0],
                                               at: CMTime.zero)
                var firstInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
                guard let cor = photoFrameCordinate[i] as? CGRect else{return}
                if videoAssetss as! AVURLAsset != firstAsset.last as! AVURLAsset{
                firstInstruction.setOpacity(0, at: assetDuration) // asseteDuration
                              }
                let transform = CGAffineTransform(scaleX: 0.4, y:1).concatenating(CGAffineTransform(translationX: trackInstruction[i-1]., y: -cor.origin.y))
                firstInstruction.setTransform(transform, at: CMTime.zero)
                assetDuration =  CMTimeAdd(assetDuration, (videoAssetss as! AVURLAsset).duration)
                lastAsset = videoAssetss as? AVURLAsset
                trackInstruction.append(firstInstruction)

                i += 1
               // arrayOfComposition.append(firstTrack)
            } catch {
                print("Failed to load first track")
                return
            }
        }

        // Watermark Effect

        let width: CGFloat =  widthConstraintViewForImage.constant
        let height = heightConstraintViewForImage.constant
        let parentlayer = CALayer()
        parentlayer.frame = CGRect(x: 0, y: 0, width: width, height: height)

        //Mark: Frame layer
        let bglayer = CALayer()
        bglayer.contents = imgViewForAdminImage.image?.cgImage
        bglayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
        bglayer.backgroundColor = UIColor.clear.cgColor

        for index in 0..<videoURLS.count{
            var videoBox = CALayer()

            guard let cor = photoFrameCordinate[index] as? CGRect else{return}
            videoBox.frame = CGRect(x: cor.origin.x, y: parentlayer.frame.maxY-(cor.origin.y+cor.size.height), width: cor.size.width, height: cor.size.height)
            videoBox.backgroundColor = UIColor.green.cgColor
            videoBox.masksToBounds = true
            var vlayer = CALayer()
            vlayer.contentsScale = 1.0
            vlayer.contentsGravity = CALayerContentsGravity.center
            vlayer.frame = CGRect(x: 0, y: 0, width:cor.size.width, height: cor.size.height)
            vlayer.backgroundColor = UIColor.yellow.cgColor
            videolayer.append(vlayer)
            videoBox.addSublayer(vlayer)
            box.append(videoBox)
            bglayer.addSublayer(videoBox)
        }

        parentlayer.addSublayer(bglayer)
        let layercomposition = AVMutableVideoComposition()
        layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: videolayer, in: parentlayer)



        // 2.1

            mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: assetDuration)
            mainInstruction.layerInstructions = trackInstruction
            mainInstruction.backgroundColor = UIColor.red.cgColor





        layercomposition.instructions = [mainInstruction]
      //  layercomposition.renderSize = CGSizeMake(videoSize.width * scale, videoSize.height * scale)
        layercomposition.renderScale = 1.0
        layercomposition.renderSize = CGSize(width: width, height: height)

        //  create new file to receive data
        let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let docsDir = dirPaths[0] as NSString
        let movieFilePath = docsDir.appendingPathComponent("result.mp4")
        let movieDestinationUrl = NSURL(fileURLWithPath: movieFilePath)

        // use AVAssetExportSession to export video
        let assetExport = AVAssetExportSession(asset: mixComposition, presetName:AVAssetExportPresetMediumQuality)
        assetExport?.outputFileType = AVFileType.mp4
        assetExport?.videoComposition = layercomposition

        // Check exist and remove old file
        FileManager.default.removeItemIfExisted(movieDestinationUrl as URL)

        assetExport?.outputURL = movieDestinationUrl as URL
        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSession.Status.failed:
                print("failed")
                print(assetExport?.error ?? "unknown error")
            case AVAssetExportSession.Status.cancelled:
                print("cancelled")
                print(assetExport?.error ?? "unknown error")
            default:
                print("Movie complete")



                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: movieDestinationUrl as URL)
                }) { saved, error in
                    if saved {
                        print("Saved")
                    }else{
                        print(error!)
                    }
                }

                self.playVideo()`enter code here`

            }
        })
    }   

Когда я устанавливаю непрозрачность первого видео на 0, тогда второе видео показывается во всех кадрах. Я думаю, что все видео идут, но за первым видео, поэтому только первое видео показывается во всех кадрах.

Добро пожаловать на сайт PullRequest, где вы можете задавать вопросы и получать ответы от других членов сообщества.
...