Video built from an image collection takes a long time to play

I am trying to build a video editor in which I do the following:

1) Collect photos from the user's gallery.

2) Convert that array of photos into a video.

3) Add animations to the video.

4) Play the video.

The code I wrote for each step is as follows:

1) Collecting the photos from the user's gallery:

    func openImagePicker(){
        let customColor = UIColor.init(red: 64.0/255.0, green: 0.0, blue: 144.0/255.0, alpha: 1.0)
        let customCameraColor = UIColor.init(red: 86.0/255.0, green: 1.0/255.0, blue: 236.0/255.0, alpha: 1.0)

        pickerViewController.numberOfPhotoToSelect = 5
        pickerViewController.theme.titleLabelTextColor = UIColor.white
        pickerViewController.theme.navigationBarBackgroundColor = customColor
        pickerViewController.theme.tintColor = UIColor.white
        pickerViewController.theme.orderTintColor = customCameraColor
        pickerViewController.theme.cameraVeilColor = customCameraColor
        pickerViewController.theme.cameraIconColor = UIColor.white
        pickerViewController.theme.statusBarStyle = .lightContent
        self.yms_presentCustomAlbumPhotoView(pickerViewController, delegate: self)

    }

    func photoPickerViewController(_ picker: YMSPhotoPickerViewController!, didFinishPickingImages photoAssets: [PHAsset]!) {
        picker.dismiss(animated: true) {
            self.selectedImageArray =  NSMutableArray()
            let imageManager = PHImageManager.init()
            let options = PHImageRequestOptions.init()
            options.deliveryMode = .highQualityFormat
            options.resizeMode = .exact
            options.isSynchronous = true
            for asset: PHAsset in photoAssets
            {
                let targetSize = CGSize(width:self.view.frame.size.width
                    , height:self.view.frame.size.width)
                imageManager.requestImage(for: asset, targetSize:targetSize, contentMode: .aspectFill, options: options, resultHandler: { (image, info) in
                    self.selectedImageArray.add(image!)
                })
            }

            let imageVideaMakerController = self.storyboard?.instantiateViewController(withIdentifier: "VideoEditorController") as! VideoEditorController

            imageVideaMakerController.selectedImageArray = self.selectedImageArray as! [UIImage]
            self.navigationController!.pushViewController(imageVideaMakerController, animated: true)

        }
    }
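
Note on step 1: with `options.isSynchronous = true`, every `requestImage` call blocks inside the dismiss completion handler, so the push to `VideoEditorController` waits for all full-quality images (including possible iCloud downloads). A sketch of an asynchronous variant I am considering — the helper name `loadImages` is mine, not part of the project — which preserves the selection order:

    func loadImages(for photoAssets: [PHAsset],
                    completion: @escaping ([UIImage]) -> Void) {
        let imageManager = PHImageManager.default()
        let options = PHImageRequestOptions()
        options.deliveryMode = .highQualityFormat // handler fires once, with the final image
        options.resizeMode = .exact
        options.isSynchronous = false             // key difference: no blocking
        options.isNetworkAccessAllowed = true     // fetch iCloud originals in the background

        let targetSize = CGSize(width: view.frame.size.width,
                                height: view.frame.size.width)
        var images = [UIImage?](repeating: nil, count: photoAssets.count) // keep picking order
        let group = DispatchGroup()
        for (index, asset) in photoAssets.enumerated() {
            group.enter()
            imageManager.requestImage(for: asset, targetSize: targetSize,
                                      contentMode: .aspectFill, options: options) { image, _ in
                images[index] = image
                group.leave()
            }
        }
        group.notify(queue: .main) { completion(images.compactMap { $0 }) }
    }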

2) Converting the array of photos into a video:

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        self.navigationController?.navigationBar.isHidden = false

        setUpInitialView()
        collectionView.reloadData()
    }

    //MARK:- Custom Methods
    func setUpInitialView(){
        let loadingNotification = MBProgressHUD.showAdded(to: view, animated: true)
        loadingNotification.mode = MBProgressHUDMode.indeterminate
        loadingNotification.label.text = "Loading"

        buildVideoFromImageArray()
        //filterScrollContents()
    }

    func buildVideoFromImageArray() {

        imageArrayToVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video1.MP4")
        removeFileAtURLIfExists(url: imageArrayToVideoURL)
        guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mp4) else {
            fatalError("AVAssetWriter error")
        }
        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
        guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
            fatalError("Negative : Can't apply the Output settings...")
        }
        let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
        let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
        }
        if videoWriter.startWriting() {
            // NB: despite the name, this starts the writer session at
            // `imagesPerSecond` seconds rather than at time zero.
            let zeroTime = CMTimeMake(Int64(imagesPerSecond), Int32(1))
            videoWriter.startSession(atSourceTime: zeroTime)

            assert(pixelBufferAdaptor.pixelBufferPool != nil)
            let media_queue = DispatchQueue(label: "mediaInputQueue")
            videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
                let fps: Int32 = 1
                let framePerSecond: Int64 = Int64(self.imagesPerSecond)
                // Each image stays on screen for `imagesPerSecond` seconds (at 1 fps).
                let frameDuration = CMTimeMake(Int64(self.imagesPerSecond), fps)
                var frameCount: Int64 = 0
                var appendSucceeded = true
                var newImageArr = self.selectedImageArray
                while (!newImageArr.isEmpty) {
                    if (videoWriterInput.isReadyForMoreMediaData) {
                        let nextPhoto = newImageArr.remove(at: 0)
                        let lastFrameTime = CMTimeMake(frameCount * framePerSecond, fps)
                        let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)
                        var pixelBuffer: CVPixelBuffer? = nil
                        let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                        if let pixelBuffer = pixelBuffer, status == 0 {
                            let managedPixelBuffer = pixelBuffer
                            CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                            let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                            let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                            context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
                            let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                            let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                            //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                            let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
                            let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                            let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                            let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
                            context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                            CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                            appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                        } else {
                            print("Failed to allocate pixel buffer")
                            appendSucceeded = false
                        }
                    }
                    if !appendSucceeded {
                        break
                    }
                    frameCount += 1
                }
                videoWriterInput.markAsFinished()
                videoWriter.finishWriting { () -> Void in
                    print("-----video1 url = \(self.imageArrayToVideoURL)")
                    self.globalVideoURL = self.imageArrayToVideoURL

                    self.asset = AVAsset.init(url:self.imageArrayToVideoURL as URL)

                    self.exportVideoWithAnimation()
                }
            })
        }
    }
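
To see which stage actually dominates, it may help to time the two encodes; a minimal sketch (the `stageStart` variable and the labels are mine). Since both the writer loop and the export run asynchronously, the timestamps go inside the completion handlers rather than around the calls:

    var stageStart = CFAbsoluteTimeGetCurrent()

    func logStage(_ label: String) {
        let now = CFAbsoluteTimeGetCurrent()
        print("\(label): \(String(format: "%.2f", now - stageStart)) s")
        stageStart = now
    }

    // Usage: call logStage("images -> video1") inside videoWriter.finishWriting { },
    // and logStage("animated export") inside exportAsynchronously's completion handler.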

3) Adding animations to the video:

    func exportVideoWithAnimation() {
        let composition = AVMutableComposition()
        let track =  self.asset?.tracks(withMediaType: AVMediaType.video)
        let videoTrack:AVAssetTrack = track![0] as AVAssetTrack
        let timerange = CMTimeRangeMake(kCMTimeZero, (self.asset?.duration)!)

        let compositionVideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID())!

        do {
            try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
            compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
        } catch {
            print(error)
        }

        //if your video has sound, you don’t need to check this
        if self.audioIsEnabled {
            let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!

            for audioTrack in (self.asset?.tracks(withMediaType: AVMediaType.audio))! {
                do {
                    try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero)
                } catch {
                    print(error)
                }
            }
        }

        let size = videoTrack.naturalSize

        let videolayer = CALayer()
        videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)

        let parentlayer = CALayer()
        parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height)
        parentlayer.addSublayer(videolayer)

        ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
        //this is the animation part
        // Start times (in seconds) of each frame's animation; every frame stays
        // on screen for 3 seconds, which is why consecutive values differ by 3.
        let time = [0.00001, 3, 6, 9, 12]
        let imgarray = self.selectedImageArray

        for image in 0..<self.selectedImageArray.count {
            let nextPhoto = imgarray[image]

            let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
            let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
            let aspectRatio = min(horizontalRatio, verticalRatio)
            let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
            let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
            let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
            let blackLayer = CALayer()

            //MARK:- Animations==================================
            ///#1. left->right///
            if(self.globalSelectedTransitionTag == 0){

                blackLayer.frame = CGRect(x: -videoTrack.naturalSize.width, y: 0, width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
                blackLayer.backgroundColor = UIColor.black.cgColor

                let imageLayer = CALayer()
                imageLayer.frame = CGRect(x: x, y: y, width: newSize.width, height: newSize.height)
                imageLayer.contents = imgarray[image].cgImage
                blackLayer.addSublayer(imageLayer)


                // Slide the layer across the frame; the key path animates
                // position.x, so register it under a matching key.
                let animation = CABasicAnimation()
                animation.keyPath = "position.x"
                animation.fromValue = -videoTrack.naturalSize.width
                animation.toValue = 5 * (videoTrack.naturalSize.width)
                animation.duration = 5
                animation.beginTime = CFTimeInterval(time[image])
                animation.fillMode = kCAFillModeForwards
                animation.isRemovedOnCompletion = false
                blackLayer.add(animation, forKey: "position")
            }

            parentlayer.addSublayer(blackLayer)
        }

        let layercomposition = AVMutableVideoComposition()
        layercomposition.frameDuration = CMTimeMake(1, 30)
        layercomposition.renderSize = size

        layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer)
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
        let videotrack = composition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack
        let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack)
        instruction.layerInstructions = [layerinstruction]
        layercomposition.instructions = [instruction]
        if(fromTransition){
            self.globalrVideoComposition = layercomposition
        }
        let animatedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/video2.mp4")
        self.removeFileAtURLIfExists(url: animatedVideoURL)

        guard let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetHighestQuality) else {return}
        assetExport.videoComposition = self.globalrVideoComposition
        assetExport.outputFileType = AVFileType.mp4
        assetExport.outputURL = animatedVideoURL as URL
        print("****** animatedVideoURL *****",animatedVideoURL)
        assetExport.exportAsynchronously(completionHandler: {
            switch assetExport.status{
            case  AVAssetExportSessionStatus.failed:
                print("failed \(String(describing: assetExport.error))")
            case AVAssetExportSessionStatus.cancelled:
                print("cancelled \(String(describing: assetExport.error))")
            default:
                print("Exported")

                if(self.fromPlayVideo){
                    DispatchQueue.main.async {
                        self.globalVideoURL = animatedVideoURL
                        self.playVideoInPlayer(animatedVideoURL: animatedVideoURL as URL)
                    }
                }else if(self.fromSave){

                    PHPhotoLibrary.shared().performChanges({
                        PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: animatedVideoURL as URL)
                        print("222222 animatedVideoURL", animatedVideoURL)
                    }) { saved, error in
                        DispatchQueue.main.async {
                            MBProgressHUD.hideAllHUDs(for: self.view, animated: true)
                        }

                        if saved {
                            let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert)
                            let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil)
                            alertController.addAction(defaultAction)

                            print("The task is done, enjoy now!")
                            self.present(alertController, animated: true, completion: nil)
                        }
                    }
                }
            }
        })
    }
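
One knob that directly affects the export step is the preset: `AVAssetExportPresetHighestQuality` is the slowest choice. A hedged variant of the export setup above, trading some quality for speed (the rest of the method stays the same):

    // Presets the current device/asset actually supports can be listed first;
    // AVAssetExportPreset1280x720 or AVAssetExportPresetMediumQuality are
    // usually noticeably faster than AVAssetExportPresetHighestQuality.
    print(AVAssetExportSession.exportPresets(compatibleWith: composition))

    guard let assetExport = AVAssetExportSession(asset: composition,
                                                 presetName: AVAssetExportPreset1280x720) else { return }
    assetExport.videoComposition = self.globalrVideoComposition
    assetExport.outputFileType = AVFileType.mp4
    assetExport.outputURL = animatedVideoURL as URL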

4) Playing the video:

    func playVideoInPlayer(animatedVideoURL: URL) {

        if(globalFilterName != nil){
            self.asset = AVAsset.init(url: animatedVideoURL)
            let newPlayerItem = AVPlayerItem.init(asset: self.asset)
            newPlayerItem.videoComposition = globalrVideoComposition
            self.player = AVPlayer.init(playerItem: newPlayerItem)
        }else{
            let newPlayerItem = AVPlayerItem.init(url: animatedVideoURL)
            self.player = AVPlayer.init(playerItem: newPlayerItem)
        }

        NotificationCenter.default.addObserver(self, selector: #selector(self.finishedPlaying(_:)), name: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object:nil)
        self.playerLayer = AVPlayerLayer.init(player:self.player)
        let width: CGFloat = self.videoContainerView.frame.size.width
        let height: CGFloat = self.videoContainerView.frame.size.height
        self.playerLayer.frame = CGRect(x: 0.0, y:0, width: width, height: height)
        self.playerLayer.backgroundColor = UIColor.black.cgColor
        self.playerLayer.videoGravity = .resizeAspectFill
        self.videoContainerView.layer.addSublayer( self.playerLayer)

        self.playPauseBtn.isHidden = false

        self.playPauseBtn.setImage(UIImage.init(named:"pause"), for:.normal)

        DispatchQueue.main.async {
            MBProgressHUD.hideAllHUDs(for:self.view, animated:true)
            self.player.play()
        }

    }
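
For the preview specifically, one direction I have been looking at is skipping the export entirely: `AVVideoCompositionCoreAnimationTool` is meant for export only, while for live playback the animated layers can be attached through an `AVSynchronizedLayer` on top of an `AVPlayerLayer` that plays the composition directly. A rough sketch, assuming `composition` from step 3 is reachable here and the overlay layers are rebuilt without the `videolayer` used for export (`overlayLayer` below is hypothetical):

    // Preview without exporting: the player renders the composition itself,
    // and AVSynchronizedLayer keeps the CALayer animations in sync with playback.
    let item = AVPlayerItem(asset: composition) // no animationTool-based videoComposition here
    let player = AVPlayer(playerItem: item)

    let playerLayer = AVPlayerLayer(player: player)
    playerLayer.frame = videoContainerView.bounds
    playerLayer.videoGravity = .resizeAspectFill
    videoContainerView.layer.addSublayer(playerLayer)

    let syncLayer = AVSynchronizedLayer(playerItem: item)
    syncLayer.frame = playerLayer.frame
    syncLayer.addSublayer(overlayLayer) // overlayLayer: the animated layers only (hypothetical)
    videoContainerView.layer.addSublayer(syncLayer)

    player.play()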

This whole flow works fine; the only problem is that it takes a long time before the video is ready to play with all the settings applied (converting the images into a video and adding the animations). Please help me cut this time down, so the user does not have to wait long for playback to start after picking the images from the image picker.

Any help or guidance would be highly appreciated. Thanks in advance!
