I'm building an app that lets users pick a video from their library and add a caption on top of it. The caption can be dragged wherever they want inside a container view that holds the video behind it and the caption in front. I then want to export the video with the caption rendered over it.
To do this, I first turn the UIView holding the caption into an image, then composite that image over the video using AVMutableComposition. This works for portrait videos, which fill the whole view.
The problem I'm running into is that with landscape videos the caption image ends up misplaced. I think I need to always use a render canvas the size of the caption view, place the video onto that canvas, and then place the caption on top of it.
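To make the idea concrete, the geometry I have in mind would look roughly like the sketch below. This is not code from my project, and the names (canvasSize, videoNaturalSize, videoFrame) are placeholders; the point is just to aspect-fit the video into a render canvas that has the caption view's pixel size:

import AVFoundation
import UIKit

// Sketch only: where the video layer would sit on a canvas the size of the caption view.
// canvasSize = caption view size in pixels, videoNaturalSize = orientation-corrected track size.
func videoFrame(onCanvas canvasSize: CGSize, videoNaturalSize: CGSize) -> CGRect {
    // AVMakeRect aspect-fits and centers a rect with the video's aspect ratio inside the canvas.
    return AVMakeRect(aspectRatio: videoNaturalSize,
                      insideRect: CGRect(origin: .zero, size: canvasSize))
}

Here is my current code: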
import UIKit
import AVFoundation
import Photos
class VideoEditor {
    func mergeVideoCaption(fromVideoAt videoURL: URL, videoRect: CGRect, captionImage: UIImage, onComplete: @escaping (URL?) -> Void) {
        let asset = AVURLAsset(url: videoURL)
        let composition = AVMutableComposition()
        let captionSize = CGSize(width: captionImage.size.width * UIScreen.main.scale, height: captionImage.size.height * UIScreen.main.scale)
        //let captionSize = CGSize(width: captionImage.size.width, height: captionImage.size.height)

        //Create new mutable composition track from composition and get the asset's video track
        guard let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid),
              let assetTrack = asset.tracks(withMediaType: .video).first else {
            print("Something is wrong with the asset.")
            onComplete(nil)
            return
        }
        do {
            //Set the time range for the composition video track based on asset duration
            //Insert asset track into the compositionTrack
            let timeRange = CMTimeRange(start: .zero, duration: asset.duration)
            try compositionTrack.insertTimeRange(timeRange, of: assetTrack, at: .zero)

            //Create mutable audio track from composition and set time duration based on audio asset duration
            //Insert audio track into compositionAudioTrack
            if let audioAssetTrack = asset.tracks(withMediaType: .audio).first,
               let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) {
                try compositionAudioTrack.insertTimeRange(timeRange, of: audioAssetTrack, at: .zero)
            }
        } catch {
            print(error)
            onComplete(nil)
            return
        }
        compositionTrack.preferredTransform = assetTrack.preferredTransform
        let videoInfo = orientation(from: assetTrack.preferredTransform)

        let videoSize: CGSize
        if videoInfo.isPortrait {
            videoSize = CGSize(
                width: assetTrack.naturalSize.height,
                height: assetTrack.naturalSize.width)
        } else {
            videoSize = assetTrack.naturalSize
        }

        //Create layers
        let videoLayer = CALayer()
        //videoLayer.frame = CGRect(origin: .zero, size: videoSize)
        videoLayer.frame = CGRect(x: videoRect.origin.x * UIScreen.main.scale, y: videoRect.origin.y * UIScreen.main.scale, width: videoSize.width, height: videoSize.height)
        print("video size: \(videoSize)")
        print("video rect: \(videoRect)")
        print("caption rect: \(captionSize)")

        let overlayLayer = CALayer()
        overlayLayer.frame = CGRect(origin: .zero, size: videoSize)
        addImage(to: overlayLayer, image: captionImage, imageSize: videoSize)

        let outputLayer = CALayer()
        outputLayer.frame = CGRect(origin: .zero, size: videoSize)
        outputLayer.backgroundColor = GoinOnColors.black.cgColor

        let bgLayer = CALayer()
        bgLayer.frame = CGRect(origin: .zero, size: videoSize)
        bgLayer.backgroundColor = GoinOnColors.black.cgColor

        outputLayer.addSublayer(bgLayer)
        outputLayer.addSublayer(videoLayer)
        outputLayer.addSublayer(overlayLayer)
        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = videoSize
        videoComposition.frameDuration = CMTime(value: 1, timescale: 30)
        videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(
            postProcessingAsVideoLayer: videoLayer,
            in: outputLayer)

        //Set the time range instruction for the video composition object
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: .zero, duration: composition.duration)
        videoComposition.instructions = [instruction]

        let layerInstruction = compositionLayerInstruction(
            for: compositionTrack,
            assetTrack: assetTrack)
        instruction.layerInstructions = [layerInstruction]
        guard let export = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
            print("Cannot create export session.")
            onComplete(nil)
            return
        }

        let videoName = UUID().uuidString
        let exportURL = URL(fileURLWithPath: NSTemporaryDirectory())
            .appendingPathComponent(videoName)
            .appendingPathExtension("mov")

        export.videoComposition = videoComposition
        export.outputFileType = .mov
        export.outputURL = exportURL

        export.exportAsynchronously {
            DispatchQueue.main.async {
                switch export.status {
                case .completed:
                    onComplete(exportURL)
                    //PHPhotoLibrary.shared().performChanges({
                    //    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: exportURL)
                    //})
                default:
                    print("Something went wrong during export.")
                    print(export.error ?? "unknown error")
                    onComplete(nil)
                }
            }
        }
    }
    private func addImage(to layer: CALayer, image: UIImage, imageSize: CGSize) {
        let imageLayer = CALayer()
        imageLayer.frame = CGRect(
            x: 0,
            y: 0,
            width: imageSize.width,
            height: imageSize.height)
        imageLayer.contents = image.cgImage
        layer.addSublayer(imageLayer)
    }

    private func orientation(from transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
        var assetOrientation = UIImage.Orientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }

    private func compositionLayerInstruction(for track: AVCompositionTrack, assetTrack: AVAssetTrack) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let transform = assetTrack.preferredTransform
        instruction.setTransform(transform, at: .zero)
        return instruction
    }
}
// This is how I call it:
let captionImage = UIImage(view: self.editCaptionMasterContainer)
let editor = VideoEditor()
editor.mergeVideoCaption(fromVideoAt: url, videoRect: self.videoPartial.avPlayerLayer!.videoRect, captionImage: captionImage) { exportedURL in
    // handle the exported video URL here
}
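For completeness, UIImage(view:) is a small convenience initializer I use to snapshot the caption container. It is not shown above; it does roughly the following (a sketch using UIGraphicsImageRenderer, not the exact extension):

import UIKit

extension UIImage {
    // Render the view's layer into a bitmap-backed image, keeping the screen scale.
    convenience init(view: UIView) {
        let renderer = UIGraphicsImageRenderer(bounds: view.bounds)
        let rendered = renderer.image { context in
            view.layer.render(in: context.cgContext)
        }
        // The renderer always produces a CGImage-backed image, so the force unwrap is safe here.
        self.init(cgImage: rendered.cgImage!, scale: rendered.scale, orientation: .up)
    }
}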
Example screenshots: