У меня очень странная ошибка, которая возникает только на случайных устройствах. Я использую AVAssetWriter для экспорта видео и рисую/применяю шейдеры с помощью Metal, но в готовом анимированном mp4-видео появляются цветные пиксельные артефакты.
video: https://drive.google.com/file/d/1g6KyL18JqclOW1kVQkif3o3zaqylYaLk/view?usp=sharing
В изображении / кадре вы можете видеть цветные пиксели артефактов, это обычно происходит, когда линия перемещается (анимированная линия)
Есть идеи? спасибо
init
/// Failable initializer: configures an `AVAssetWriter` that encodes frames to an .mp4
/// file at `url`, preferring HEVC when the device supports it and falling back to H.264.
/// Returns `nil` if the writer cannot be created for the given URL.
/// NOTE(review): this snippet appears truncated — the closing brace of the init (and the
/// `canAdd`/`add` handling for the pixel-buffer input, `startWriting`, etc.) is not visible here.
init?(outputURL url: URL, size: CGSize) {
do {
// File type is .mp4 even though the prose mentions MOV; the container is MP4.
assetWriter = try AVAssetWriter(outputURL: url, fileType: .mp4)
} catch let error{
print(error)
return nil
}
// Encoder settings: only an average bitrate is specified.
// NOTE(review): `ClipSettings.bitrate` is defined elsewhere — the same bitrate is used
// for both HEVC and H.264; confirm it is appropriate for HEVC (it typically needs less).
let compressionProperties = NSDictionary(dictionary: [
AVVideoAverageBitRateKey:ClipSettings.bitrate
])
let outputSettings: [String: Any]
// Prefer HEVC on iOS 11+ when the device actually supports HEVC export.
// NOTE(review): `allExportPresets()` reflects AVAssetExportSession capability, which is
// used here as a proxy for hardware HEVC encode support — verify this heuristic.
if #available(iOS 11.0, *),AVAssetExportSession.allExportPresets().contains(AVAssetExportPresetHEVCHighestQuality) {
outputSettings = [ AVVideoCodecKey : AVVideoCodecType.hevc,
AVVideoWidthKey : size.width,
AVVideoHeightKey : size.height,
AVVideoCompressionPropertiesKey:compressionProperties
]
} else {
// Fallback on earlier versions
outputSettings = [ AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : size.width,
AVVideoHeightKey : size.height,
AVVideoCompressionPropertiesKey:compressionProperties
]
}
assetWriterVideoInput = AVAssetWriterInput(mediaType: .video, outputSettings: outputSettings)
// Offline export: frames are pushed as fast as the writer accepts them, not in real time.
assetWriterVideoInput.expectsMediaDataInRealTime = false
// BGRA matches the usual Metal drawable/texture pixel format so frame bytes can be
// copied into the pixel buffer without conversion.
// NOTE(review): no kCVPixelBufferMetalCompatibilityKey is set here — presumably frames
// are copied via CPU (`getBytes`) rather than rendered directly into the buffer; confirm.
let sourcePixelBufferAttributes: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_32BGRA,
kCVPixelBufferWidthKey as String : size.width,
kCVPixelBufferHeightKey as String : size.height
]
// The adaptor supplies a recycled CVPixelBuffer pool sized to the attributes above.
assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
sourcePixelBufferAttributes: sourcePixelBufferAttributes)
if assetWriter.canAdd(assetWriterVideoInput){
assetWriter.add(assetWriterVideoInput)
}else{
// NOTE(review): a failed add leaves the writer unusable but the init still succeeds —
// consider returning nil here as well.
print("add failed")
}
Так записывается кадр из Metal-текстуры:
/// Copies one rendered Metal texture into a pooled CVPixelBuffer and appends it to the
/// asset writer at a fixed 60 fps cadence (timescale 600, 10 ticks per frame).
/// - Parameters:
///   - texture: the BGRA texture holding the finished frame (`kCVPixelFormatType_32BGRA`
///     is assumed by the init's pixel-buffer attributes).
///   - time: unused in the visible body; the presentation time is derived from
///     `frameNumber` instead.
///   - podSticker: unused in the visible body — TODO confirm it is needed.
/// NOTE(review): the colored-pixel artifacts described in the post are most likely caused
/// by reading the texture with `getBytes` BEFORE the GPU command buffer that rendered it
/// has finished (no `waitUntilCompleted` is visible here), and/or — on macOS managed
/// storage — a missing `MTLBlitCommandEncoder.synchronize(resource:)`. Confirm the caller
/// waits for GPU completion before invoking this method.
/// NOTE(review): the snippet is truncated — the closing braces appear commented out (`//}}`).
func writeFrame(forTexture texture: MTLTexture, time: TimeInterval, podSticker: PodStickerView) {
if !isRecording {
return
}
// Fixed output rate: 60 fps on a 600-tick timescale → each frame lasts 10 ticks.
let fps: Int32 = 60
let intervalDuration = CFTimeInterval(1.0 / Double(fps))
let timescale: Float = 600
let kTimescale: Int32 = Int32(timescale)
let frameDuration = CMTimeMake(
value: Int64( floor(timescale / Float(fps)) ),
timescale: kTimescale
)
// Back-pressure: spin the run loop until the input accepts more data, growing the
// wait each iteration (300ms, 500ms, 700ms, ...).
// NOTE(review): `intervalDuration` above is computed but never used in the visible body.
var waitTime = 300.0 //fixes dropped frames
while !assetWriterVideoInput.isReadyForMoreMediaData {
let waitIntervale: TimeInterval = 0.001 * waitTime
let maxDate = Date(timeIntervalSinceNow: waitIntervale)
RunLoop.current.run(until: maxDate)
waitTime += 200.0 // add 200ms every time
}
// The pool only exists after startWriting/startSession; bail out (dropping the frame)
// if it is not available yet.
guard let pixelBufferPool = assetWriterPixelBufferInput.pixelBufferPool else {
print("Pixel buffer asset writer input did not have a pixel buffer pool available; cannot retrieve frame")
return
}
var maybePixelBuffer: CVPixelBuffer? = nil
let status = CVPixelBufferPoolCreatePixelBuffer(nil, pixelBufferPool, &maybePixelBuffer)
if status != kCVReturnSuccess {
print("Could not get pixel buffer from asset writer input; dropping frame...")
return
}
guard let pixelBuffer = maybePixelBuffer else { return }
// Lock for CPU writes, copy the texture's pixels row-aligned to the buffer's stride,
// append, then unlock. The lock/getBytes/append/unlock order matters.
CVPixelBufferLockBaseAddress(pixelBuffer, [])
let pixelBufferBytes = CVPixelBufferGetBaseAddress(pixelBuffer)!
// Use the bytes per row value from the pixel buffer since its stride may be rounded up to be 16-byte aligned
let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
let region = MTLRegionMake2D(0, 0, texture.width, texture.height)
// NOTE(review): this CPU read is only valid once the GPU has finished rendering into
// `texture` — see the method-level note about the artifact cause.
texture.getBytes(pixelBufferBytes, bytesPerRow: bytesPerRow, from: region, mipmapLevel: 0)
// Presentation time is frameNumber * frameDuration; the `time` parameter is ignored.
let presentationTime = CMTimeMultiply(frameDuration, multiplier: Int32(frameNumber))
Engine.renderTime = presentationTime.seconds
//write video
// NOTE(review): the Bool result of append(...) is discarded — a failed append (e.g.
// non-increasing timestamps) would go unnoticed; consider checking it.
self.assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime: presentationTime)
CVPixelBufferUnlockBaseAddress(pixelBuffer,[])
//}}