Я пытаюсь Chroma-Key MP4 видео, воспроизводимое через UIView.
Я получаю текстуру каждый кадр примерно так:
var fullTex:CVMetalTexture?
let w = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
let h = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
videoTextureCache!,
pixelBuffer,
nil,
MTLPixelFormat.bgra8Unorm,
w, h, 0,
&fullTex)
let fullTexture:MTLTexture? = CVMetalTextureGetTexture(fullTex!)
Передав это моему шейдеру так:
let commandBuffer = commandQueue.makeCommandBuffer()!
let commandEncoder = commandBuffer.makeComputeCommandEncoder()!
commandEncoder.setComputePipelineState(pipelineState)
commandEncoder.setTexture(fullTexture, index: 0)
commandEncoder.setTexture(fullTexture, index: 1)
commandEncoder.dispatchThreadgroups(threadgroupsPerGrid, threadsPerThreadgroup: threadsPerThreadgroup)
commandEncoder.endEncoding()
commandBuffer.commit();
commandBuffer.waitUntilCompleted()
// This proves the shader created transparency, but it doesn't render as such on screen
let i = UIImage(pixelBuffer: pixelBuffer, context: CIContext())
//This writes to a AVSampleBufferDisplayLayer
self.displayPixelBuffer(pixelBuffer: pixelBuffer, atTime: outputItemTime)
Шейдер выглядит так:
kernel void GreenScreen(texture2d<float, access::read> inTexture [[texture(0)]],
texture2d<float, access::write> outTexture [[texture(1)]],
uint2 gid [[thread_position_in_grid]]) {
float4 color = inTexture.read(gid);
float4 combinedColor = float4(color.rgb,1.0);
if (color.r < 0.5)
combinedColor = float4(0.0,0,0,0);
outTexture.write(combinedColor, gid);
}
Показать так:
func displayPixelBuffer(pixelBuffer: CVPixelBuffer, atTime outputTime: CMTime) {
var err: OSStatus = noErr
if videoInfo == nil || false == CMVideoFormatDescriptionMatchesImageBuffer(videoInfo!, pixelBuffer) {
if videoInfo != nil {
videoInfo = nil
}
err = CMVideoFormatDescriptionCreateForImageBuffer(nil, pixelBuffer, &videoInfo)
if (err != noErr) {
DLog("Error at CMVideoFormatDescriptionCreateForImageBuffer \(err)")
}
}
var sampleTimingInfo = CMSampleTimingInfo(duration: kCMTimeInvalid, presentationTimeStamp: outputTime, decodeTimeStamp: kCMTimeInvalid)
var sampleBuffer: CMSampleBuffer?
err = CMSampleBufferCreateForImageBuffer(nil, pixelBuffer, true, nil, nil, videoInfo!, &sampleTimingInfo, &sampleBuffer)
if (err != noErr) {
DLog("Error at CMSampleBufferCreateForImageBuffer \(err)")
}
if sampleLayer.isReadyForMoreMediaData {
sampleLayer.enqueue(sampleBuffer!)
}
}
Пример слоя добавлен так:
private func setupVideoPlaybackLayer() {
self.view.isOpaque = false
self.view.layer.isOpaque = false
sampleLayer = AVSampleBufferDisplayLayer()
sampleLayer.backgroundColor = UIColor.green.cgColor
sampleLayer.frame = view.layer.frame
self.view.layer.addSublayer(sampleLayer)
}
Но когда фильм воспроизводится, области, которые я ожидаю сделать прозрачными, будут черными, а UIView за ним будет синим.
Есть идеи?