Конвертировать изображения в видео с разной продолжительностью для отдельного изображения - PullRequest
0 голосов
/ 11 марта 2020

Я могу конвертировать в видео с изображениями. Я хочу добавить разную продолжительность для каждого изображения , поэтому я использую время вставки, добавляя предыдущее время, но получая правильную длительность 1-го кадра, но для других изображений длительность становится неправильной, а моя длительность составляет [10, 3, 5] для трех изображений. Для общей длительности с каждым кадром одинаково хорошо работает.

let lastFrameTime = CMTimeMake(frameCount * 3, fps)
                    let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, 3)
                    var pixelBuffer: CVPixelBuffer? = nil

Здесь я пытаюсь добавить другую продолжительность

    func buildVideoFromImageArray() {
                for image in 0..<3 {
                    selectedPhotosArray.append(UIImage(named: "\(image+1).jpg")!) //name of the images: 1.JPG, 2.JPG, 3.JPG, 4.JPG, 5.JPG
                }
                selectedPhotosArray = images
                imageArrayToVideoURL = outputURL
                removeFileAtURLIfExists(url: imageArrayToVideoURL)
            guard let videoWriter = try? AVAssetWriter(outputURL: imageArrayToVideoURL as URL, fileType: AVFileType.mp4) else {
                    fatalError("AVAssetWriter error")
                }
            let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(outputSize.width)), AVVideoHeightKey : NSNumber(value: Float(outputSize.height))] as [String : Any]
            guard videoWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
                    fatalError("Negative : Can't apply the Output settings...")
                }
            let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)
                let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(outputSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(outputSize.height))]
                let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
                if videoWriter.canAdd(videoWriterInput) {
                    videoWriter.add(videoWriterInput)
                }
                if videoWriter.startWriting() {
                    let zeroTime = CMTimeMake(value: Int64(imagesPerSecond),timescale: Int32(1))
                    videoWriter.startSession(atSourceTime: zeroTime)

                    assert(pixelBufferAdaptor.pixelBufferPool != nil)
                    let media_queue = DispatchQueue(label: "mediaInputQueue")
                    videoWriterInput.requestMediaDataWhenReady(on: media_queue, using: { () -> Void in
                        let fps: Int32 = 1
                       // let framePerSecond: Int64 = Int64(self.imagesPerSecond)
                       // let frameDuration = CMTimeMake(value: Int64(self.imagesPerSecond), timescale: fps)
                        var frameCount: Int64 = 0
                        var appendSucceeded = true

                        var insertTime = CMTime.zero
// Different duration for each Image  
                        let framePerSeconds = [10, 3, 5]

                        while (!self.selectedPhotosArray.isEmpty) {
                            if (videoWriterInput.isReadyForMoreMediaData) {

                                let index = frameCount%Int64(framePerSeconds.count)
                                let duration = Int64(framePerSeconds[Int(index)])

                                let nextPhoto = self.selectedPhotosArray.remove(at: 0)
                                let lastFrameTime = CMTimeMake(value: frameCount * duration, timescale: fps)
                                let frameDuration = CMTimeMake(value: duration, timescale: fps)
                                let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(frameDuration, insertTime)
                                insertTime = CMTimeAdd(insertTime, frameDuration)
                                var pixelBuffer: CVPixelBuffer? = nil
                                let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferAdaptor.pixelBufferPool!, &pixelBuffer)
                                if let pixelBuffer = pixelBuffer, status == 0 {
                                    let managedPixelBuffer = pixelBuffer
                                    CVPixelBufferLockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                                    let data = CVPixelBufferGetBaseAddress(managedPixelBuffer)
                                    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
                                    let context = CGContext(data: data, width: Int(self.outputSize.width), height: Int(self.outputSize.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(managedPixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue)
                                    context!.clear(CGRect(x: 0, y: 0, width: CGFloat(self.outputSize.width), height: CGFloat(self.outputSize.height)))
                                    let horizontalRatio = CGFloat(self.outputSize.width) / nextPhoto.size.width
                                    let verticalRatio = CGFloat(self.outputSize.height) / nextPhoto.size.height
                                    //let aspectRatio = max(horizontalRatio, verticalRatio) // ScaleAspectFill
                                    let aspectRatio = min(horizontalRatio, verticalRatio) // ScaleAspectFit
                                    let newSize: CGSize = CGSize(width: nextPhoto.size.width * aspectRatio, height: nextPhoto.size.height * aspectRatio)
                                    let x = newSize.width < self.outputSize.width ? (self.outputSize.width - newSize.width) / 2 : 0
                                    let y = newSize.height < self.outputSize.height ? (self.outputSize.height - newSize.height) / 2 : 0
                                    context?.draw(nextPhoto.cgImage!, in: CGRect(x: x, y: y, width: newSize.width, height: newSize.height))
                                    CVPixelBufferUnlockBaseAddress(managedPixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
                                    appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                                } else {
                                    print("Failed to allocate pixel buffer")
                                    appendSucceeded = false
                                }
                            }
                            if !appendSucceeded {
                                break
                            }
                            frameCount += 1
                        }
                        videoWriterInput.markAsFinished()
                        videoWriter.finishWriting { () -> Void in
                            print("-----video1 url = \(self.imageArrayToVideoURL)")
                            tempurl = self.imageArrayToVideoURL.description
                            self.asset = AVAsset(url: self.imageArrayToVideoURL as URL)
                            DispatchQueue.main.async {
                                let playerItem = AVPlayerItem(asset: self.asset)
                                let player = AVPlayer(playerItem: playerItem)
                                let playerController = AVPlayerViewController()
                                playerController.player = player

                                self.addChild(playerController)
                                self.videoview.addSubview(playerController.view)
                                playerController.view.frame.size=(self.videoview.frame.size)
                                playerController.view.contentMode = .scaleAspectFit
                                playerController.view.backgroundColor=UIColor.clear
                                self.videoview.backgroundColor=UIColor.clear
                                player.play()
                                print(player.currentItem?.duration)
                            }

                        }
                    })
                }
            }

            func removeFileAtURLIfExists(url: NSURL) {
                if let filePath = url.path {
                    let fileManager = FileManager.default
                    if fileManager.fileExists(atPath: filePath) {
                        do{
                            try fileManager.removeItem(atPath: filePath)
                        } catch let error as NSError {
                            print("Couldn't remove existing destination file: \(error)")
                        }
                    }
                }
            }
...