AVFoundation: setCropRectangle breaks video frames. Swift - PullRequest
0 votes
/ 07 May 2018

I am building a video editor on a canvas, where the video can be placed both inside the canvas frame and beyond its edges. The problem right now is that when I add setCropRectangle the video is rendered incorrectly; if I leave setCropRectangle out, the video renders fine, and cropping along x or y also works correctly.
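As far as I understand, setCropRectangle interprets the rectangle in the source track's own (untransformed) pixel coordinates, so the transform then has to map the cropped region into the render area. A minimal sketch of the order I am relying on (cropRect, transform, and compositionTrack are illustrative names):

    let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionTrack)
    // The rect is given in the track's natural pixel space, not in render-size points.
    instruction.setCropRectangle(cropRect, at: kCMTimeZero)
    // The transform then positions/scales the cropped region inside renderSize.
    instruction.setTransform(transform, at: kCMTimeZero)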

     open func cropFrame(_ videoViewFrame: CGRect, visibleFrame: CGRect, videoScrollFrame: CGRect, center: CGPoint, specificScale: CGFloat, url: URL) -> CGRect {
            debugPrint("videoViewFrame", videoViewFrame)
            debugPrint("visibleFrame", visibleFrame)
            let videoAsset = AVAsset(url: url)
            guard let videoTrack = videoAsset.tracks(withMediaType: AVMediaType.video).first else { return .zero }
            // TODO: - Think about it
            // NOTE: resolution() is a custom extension; a sketch is shown after these functions
            let videoTrackSize = videoTrack.resolution()

            let scaledSize = videoTrackSize.aspectFillIn(containerSize: visibleFrame.size)
            let scaledSizeWidth = scaledSize.width / videoTrackSize.width
            let scaledSizeHeight = scaledSize.height / videoTrackSize.height

            let renderSize = CGSize(width: visibleFrame.width, height: visibleFrame.height)
            debugPrint("renderSize init", renderSize)

            var scale = max(scaledSizeWidth, scaledSizeHeight)
            let videoViewPoint = videoViewFrame.origin

            var originalVideoX: CGFloat = 0
            var originalVideoY: CGFloat = 0
            var originalVideoWidth: CGFloat = 0
            var originalVideoHeight: CGFloat = 0

            if videoViewPoint.x < 0 {
                originalVideoX = videoViewPoint.x / scaledSizeWidth / specificScale
            }
            if videoViewPoint.y < 0 {
                originalVideoY = videoViewPoint.y / scaledSizeHeight / specificScale
            }

            /// Compute how much of the video is clipped when its origin lies inside the visible frame
            // width
            if 0 <= originalVideoX {
                var projectionWidth: CGFloat = 0
                if 0 <= videoViewFrame.origin.x && (videoViewFrame.origin.x + videoViewFrame.width) > visibleFrame.width {
                    projectionWidth = visibleFrame.width - videoViewFrame.origin.x
                } else {
                    originalVideoWidth = videoTrackSize.width
                }
                if projectionWidth < 0 {
                    originalVideoWidth = 0
                } else if projectionWidth > 0 {
                    // calculate
                    let notVisibleWidth = videoViewFrame.width - projectionWidth
                    let scaleVideoWidth = videoViewFrame.width - notVisibleWidth
                    originalVideoWidth = scaleVideoWidth / specificScale / scale
                }
                originalVideoWidth = originalVideoWidth - originalVideoX
            } else {
                // full size - x - width remainder
                let rightInvisibleWidth = videoViewFrame.width - abs(videoViewFrame.origin.x) - visibleFrame.width
                if rightInvisibleWidth <= 0 {
                    originalVideoWidth = (videoViewFrame.width - abs(videoViewFrame.origin.x)) / scale / specificScale
                } else {
                    originalVideoWidth = (videoViewFrame.width - abs(videoViewFrame.origin.x) - rightInvisibleWidth) / scale / specificScale
                }
            }

            // height

            if 0 <= originalVideoY {
                var projectionHeight: CGFloat = 0
                if 0 <= videoViewFrame.origin.y && (videoViewFrame.origin.y + videoViewFrame.height) > visibleFrame.height {
                    projectionHeight = visibleFrame.height - videoViewFrame.origin.y
                } else {
                    originalVideoHeight = videoTrackSize.height
                }
                if projectionHeight < 0 {
                    originalVideoHeight = 0
                } else if projectionHeight > 0 {
                    // calculate
                    let notVisibleHeight = videoViewFrame.height - projectionHeight
                    let scaleVideoHeight = videoViewFrame.height - notVisibleHeight
                    originalVideoHeight = scaleVideoHeight / specificScale / scale
                }
                originalVideoHeight = originalVideoHeight - originalVideoY
            } else {
                // full size - y - height remainder
                let topInvisibleHeight = videoViewFrame.height - abs(videoViewFrame.origin.y) - visibleFrame.height
                debugPrint("topInvisibleHeight", topInvisibleHeight)
                if topInvisibleHeight <= 0 {
                    originalVideoHeight = (videoViewFrame.height - abs(videoViewFrame.origin.y)) / scale / specificScale
                } else {
                    originalVideoHeight = (videoViewFrame.height - abs(videoViewFrame.origin.y) - topInvisibleHeight) / scale / specificScale
                }
            }
            let cropFrame = CGRect(x: originalVideoX, y: originalVideoY, width: originalVideoWidth, height: originalVideoHeight)
            debugPrint("cropFrame", cropFrame)

            scale *= specificScale

            self.export(videoAsset, videoTrack: videoTrack, cropFrame: cropFrame, renderSize: renderSize, scale: scale, customCenter: .zero)

            return cropFrame
        }
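For context, a hypothetical call site (the frames, scale, and the editor/videoURL names are made up for illustration):

    // A 455x256 video view scrolled 40pt off the left edge of a 375x256 canvas
    let crop = editor.cropFrame(CGRect(x: -40, y: 0, width: 455, height: 256),
                                visibleFrame: CGRect(x: 0, y: 0, width: 375, height: 256),
                                videoScrollFrame: .zero,
                                center: .zero,
                                specificScale: 1.0,
                                url: videoURL)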

    private func aspectFillScale(for size: CGSize, in containerSize: CGSize) -> CGFloat {
        let widthRatio = containerSize.width / size.width
        let heightRatio = containerSize.height / size.height
        return max(widthRatio, heightRatio)
    }
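resolution() and aspectFillIn(containerSize:) above are custom extensions, not AVFoundation API. A minimal sketch of what they are assumed to do (orientation-aware track size and aspect-fill scaling):

    extension AVAssetTrack {
        /// Assumed helper: the naturalSize with preferredTransform applied,
        /// so a portrait recording reports a portrait size.
        func resolution() -> CGSize {
            let size = naturalSize.applying(preferredTransform)
            return CGSize(width: abs(size.width), height: abs(size.height))
        }
    }

    extension CGSize {
        /// Assumed helper: scales self so it completely fills containerSize
        /// while preserving aspect ratio.
        func aspectFillIn(containerSize: CGSize) -> CGSize {
            let scale = max(containerSize.width / width, containerSize.height / height)
            return CGSize(width: width * scale, height: height * scale)
        }
    }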


    open func transform(_ cropFrame: CGRect, videoTrack: AVAssetTrack, scale: CGFloat, customCenter: CGPoint) -> CGAffineTransform {
        let offset = CGPoint(x: cropFrame.origin.x, y: cropFrame.origin.y) // can be negative when the video extends past the canvas
        let rotation = atan2(videoTrack.preferredTransform.b, videoTrack.preferredTransform.a)

        var rotationOffset = CGPoint(x: 0, y: 0)

        if videoTrack.preferredTransform.b == -1.0 {
            rotationOffset.y = videoTrack.naturalSize.width
        } else if videoTrack.preferredTransform.c == -1.0 {
            rotationOffset.x = videoTrack.naturalSize.height
        } else if videoTrack.preferredTransform.a == -1.0 {
            rotationOffset.x = videoTrack.naturalSize.width
            rotationOffset.y = videoTrack.naturalSize.height
        }

        debugPrint("offset", offset)

        var transform = CGAffineTransform.identity
        transform = transform.scaledBy(x: scale, y: scale)
        transform = transform.translatedBy(x: (offset.x + rotationOffset.x) + customCenter.x, y: (offset.y + rotationOffset.y) + customCenter.y)
        transform = transform.rotated(by: rotation)

//        print("track size \(videoTrack.naturalSize)")
//        print("preferred Transform = \(videoTrack.preferredTransform)")
//        print("rotation angle \(rotation)")
//        print("rotation offset \(rotationOffset)")
        print("actual Transform = \(transform)")

        return transform
    }
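For reference, atan2(b, a) recovers the track's rotation from the preferredTransform; a quick sanity check against the common orientation matrices (pure math, track is a hypothetical AVAssetTrack):

    // identity           (a:  1, b:  0) -> atan2( 0,  1) =  0        landscape
    // 90° CW (portrait)  (a:  0, b:  1) -> atan2( 1,  0) =  .pi / 2
    // 90° CCW            (a:  0, b: -1) -> atan2(-1,  0) = -.pi / 2
    // 180°               (a: -1, b:  0) -> atan2( 0, -1) =  .pi
    let rotation = atan2(track.preferredTransform.b, track.preferredTransform.a)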

    private func export(_ asset: AVAsset, videoTrack: AVAssetTrack, cropFrame: CGRect, renderSize: CGSize, scale: CGFloat, customCenter: CGPoint) {
        let renderWidth = renderSize.width - renderSize.width.truncatingRemainder(dividingBy: 16)
        let renderHeight = renderSize.height - renderSize.height.truncatingRemainder(dividingBy: 16)
        let actualRenderSize = CGSize(width: renderWidth, height: renderHeight)
        debugPrint("actualRenderSize", actualRenderSize)

        let assetComposition = AVMutableComposition()
        let frame1Time = CMTime(seconds: videoTrack.timeRange.duration.seconds, preferredTimescale: asset.duration.timescale)
        let trackTimeRange = CMTimeRangeMake(kCMTimeZero, frame1Time)

        guard let videoCompositionTrack = assetComposition.addMutableTrack(withMediaType: .video,
                                                                           preferredTrackID: kCMPersistentTrackID_Invalid) else {
                                                                            return
        }

        do {
            try videoCompositionTrack.insertTimeRange(trackTimeRange, of: videoTrack, at: kCMTimeZero)
        } catch {
            debugPrint(error.localizedDescription)
        }

        if let audioTrack = asset.tracks(withMediaType: AVMediaType.audio).first {
            let audioCompositionTrack = assetComposition.addMutableTrack(withMediaType: AVMediaType.audio,
                                                                         preferredTrackID: kCMPersistentTrackID_Invalid)
            do {
                try audioCompositionTrack?.insertTimeRange(trackTimeRange, of: audioTrack, at: kCMTimeZero)
            } catch {
                debugPrint(error.localizedDescription)
            }
        }

        // 1. Create the instructions
        let mainInstructions = AVMutableVideoCompositionInstruction()
        mainInstructions.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration)

        // 2. Add the layer instructions
        let layerInstructions = AVMutableVideoCompositionLayerInstruction(assetTrack: videoCompositionTrack)

        let transform = self.transform(cropFrame, videoTrack: videoTrack, scale: scale, customCenter: customCenter)

        layerInstructions.setCropRectangle(cropFrame, at: kCMTimeZero)
        layerInstructions.setTransform(transform, at: kCMTimeZero)
        layerInstructions.setOpacity(1.0, at: kCMTimeZero)
        mainInstructions.layerInstructions = [layerInstructions]

        // 3. Create the main composition and add the instructions

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = actualRenderSize
        videoComposition.instructions = [mainInstructions]
        videoComposition.frameDuration = CMTimeMake(1, 30)


        let url = URL(fileURLWithPath: "\(NSTemporaryDirectory())TrimmedMovie.mov")
        try? FileManager.default.removeItem(at: url)

        let exportSession = AVAssetExportSession(asset: assetComposition, presetName: AVAssetExportPresetHighestQuality)
        exportSession?.outputFileType = .mov
        exportSession?.videoComposition = videoComposition
        exportSession?.outputURL = url
        exportSession?.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                if let url = exportSession?.outputURL, exportSession?.status == .completed {
                    debugPrint("success export")
                    UISaveVideoAtPathToSavedPhotosAlbum(url.path, nil, nil, nil)
                } else {
                    let error = exportSession?.error
                    print("error exporting video \(String(describing: error))")
                }
            }
        })
    }
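Side note: this is Swift 4-era API; on newer SDKs the same calls are spelled slightly differently, e.g.:

    let start = CMTime.zero                                          // was kCMTimeZero
    let range = CMTimeRange(start: start, duration: frame1Time)      // was CMTimeRangeMake
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30) // was CMTimeMake(1, 30)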
...