Swift: create a movie file using images on macOS

I am using the following code to convert an image (a PNG file) into a video (a MOV file) on macOS. The target fps is 30.0, and the same image should be repeated 60 times to produce a 2-second movie file. When I run this, I get a 0-byte MOV file. Can you tell me what I am doing wrong here?
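Just to spell out the timing I'm aiming for, here is a quick sketch of the arithmetic (separate from the actual program below):

import CoreMedia

// 60 frames, each lasting 1/30 s, should add up to a 2-second movie
let frameDuration = CMTimeMake(value: 1, timescale: 30)
let expectedDuration = CMTimeMultiply(frameDuration, multiplier: 60)
print(CMTimeGetSeconds(expectedDuration))   // 2.0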

import AppKit
import AVFoundation
import CoreMedia
import CoreVideo

func writeImagesAsMovie(image: NSImage, videoPath: String, videoSize: CGSize, videoFPS: Int32) {
    // Create AVAssetWriter to write video
    guard let assetWriter = createAssetWriter(videoPath, size: videoSize) else {
        print("Error converting images to video: AVAssetWriter not created")
        return
    }

    // If here, AVAssetWriter exists so create AVAssetWriterInputPixelBufferAdaptor
    let writerInput = assetWriter.inputs.filter{ $0.mediaType == AVMediaType.video }.first!
    let sourceBufferAttributes : [String : AnyObject] = [
        kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32ARGB) as AnyObject,
        kCVPixelBufferWidthKey as String : videoSize.width as AnyObject,
        kCVPixelBufferHeightKey as String : videoSize.height as AnyObject,
        ]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourceBufferAttributes)

    // Start writing session
    assetWriter.startWriting()
    assetWriter.startSession(atSourceTime: CMTime.zero)
    if (pixelBufferAdaptor.pixelBufferPool == nil) {
        print("Error converting images to video: pixelBufferPool nil after starting session")
        return
    }

    // -- Create queue for <requestMediaDataWhenReadyOnQueue>
    let mediaQueue = DispatchQueue(label: "mediaInputQueue", attributes: [])

    // -- Set video parameters
    let frameDuration = CMTimeMake(value: 1, timescale: videoFPS)
    var frameCount = 0

    // -- Add images to video
    let numImages = 60
    writerInput.requestMediaDataWhenReady(on: mediaQueue, using: { () -> Void in
        // Append unadded images to video but only while input ready

        while (writerInput.isReadyForMoreMediaData && frameCount < numImages) {
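            // Timestamp for this frame: frame 0 is placed at t = 0; every later frame lands at (frameCount + 1) / videoFPS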
            let lastFrameTime = CMTimeMake(value: Int64(frameCount), timescale: videoFPS)
            let presentationTime = frameCount == 0 ? lastFrameTime : CMTimeAdd(lastFrameTime, frameDuration)

            if !appendPixelBufferForImageAtURL(image, pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
                print("Error converting images to video: AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer")
                return
            }

            frameCount += 1
        }

        // No more images to add? End video.

        if (frameCount >= numImages) {
            writerInput.markAsFinished()
            assetWriter.finishWriting {
                if let error = assetWriter.error {
                    print("Error converting images to video: \(error)")
                } else {
                    print("Converted images to movie @ \(videoPath)")
                }
            }
        }
    })
}


func createAssetWriter(_ path: String, size: CGSize) -> AVAssetWriter? {
    // Convert <path> to NSURL object

    let pathURL = URL(fileURLWithPath: path)

    // Return new asset writer or nil

    do {
        // Create asset writer

        let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileType.mov)

        // Define settings for video input

        let videoSettings: [String : AnyObject] = [
            AVVideoCodecKey  : AVVideoCodecH264 as AnyObject,
            AVVideoWidthKey  : size.width as AnyObject,
            AVVideoHeightKey : size.height as AnyObject,
            ]

        // Add video input to writer

        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        newWriter.add(assetWriterVideoInput)

        // Return writer

        print("Created asset writer for \(size.width)x\(size.height) video")
        return newWriter
    } catch {
        print("Error creating asset writer: \(error)")
        return nil
    }
}


func appendPixelBufferForImageAtURL(_ image: NSImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = false

    autoreleasepool {
        if  let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool {
            let pixelBufferPointer = UnsafeMutablePointer<CVPixelBuffer?>.allocate(capacity:1)
            let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
                kCFAllocatorDefault,
                pixelBufferPool,
                pixelBufferPointer
            )

            if let pixelBuffer = pixelBufferPointer.pointee, status == 0 {
                fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)
                appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
                pixelBufferPointer.deinitialize(count: 1)
            } else {
                NSLog("Error: Failed to allocate pixel buffer from pool")
            }

            pixelBufferPointer.deallocate()
        }
    }

    return appendSucceeded
}


func fillPixelBufferFromImage(_ image: NSImage, pixelBuffer: CVPixelBuffer) {
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

    // Create CGBitmapContext

    let context = CGContext(
        data: pixelData,
        width: Int(image.size.width),
        height: Int(image.size.height),
        bitsPerComponent: 8,
        bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
        space: rgbColorSpace,
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
    )!

    // Draw image into context

    var drawRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
    let cgImage = image.cgImage(forProposedRect: &drawRect, context: nil, hints: nil)!
    context.draw(cgImage, in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))

    CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
}

// load image and convert to video

let fileUrl = URL(fileURLWithPath: "/Users/pranav/Downloads/image.png")
let image = NSImage(data: try Data(contentsOf: fileUrl))
let mysize = image!.size

writeImagesAsMovie(image: image!, videoPath: "/Users/pranav/Downloads/myvideo.mov", videoSize: mysize, videoFPS: 30)

The output I get:

2020-03-28 13:47:59.768720-0700 ImageToMovie[83849:3231783] CDN - client insert callback function client = 0 type = 17 function = 0x7fff3a09610e local_olny = false
2020-03-28 13:47:59.768791-0700 ImageToMovie[83849:3231783] CDN - client setup_remote_port
2020-03-28 13:47:59.768831-0700 ImageToMovie[83849:3231783] CDN - Bootstrap Port: 1799
2020-03-28 13:47:59.769003-0700 ImageToMovie[83849:3231783] CDN - Remote Port: 7171 (com.apple.CoreDisplay.Notification)
2020-03-28 13:47:59.769055-0700 ImageToMovie[83849:3231783] CDN - client setup_local_port
2020-03-28 13:47:59.769083-0700 ImageToMovie[83849:3231783] CDN - Local Port: 7427
2020-03-28 13:47:59.812665-0700 ImageToMovie[83849:3231783] Metal API Validation Enabled
2020-03-28 13:47:59.922385-0700 ImageToMovie[83849:3231783] GVA info: preferred scaler idx 1
Created asset writer for 640.0x325.0 video
2020-03-28 13:48:00.081358-0700 ImageToMovie[83849:3231783] GVA info: preferred scaler idx 1
Program ended with exit code: 0
...