Я пытаюсь записать видео, отфильтрованное с помощью пользовательских фильтров CIF. И удалось создать файл и предварительный просмотр. Но размер экспортированного файла равен нулю и не может быть открыт Но в debbuger не возникло ошибок и не произошло сбоев. Почему так происходит?
Мой код здесь.
import UIKit
import AVFoundation
import RxCocoa
import RxSwift
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
private let disposeBag = DisposeBag()
let camPreview = UIView()
let imageView = UIImageView()
var camera: AVCaptureDevice!
var videoInput: AVCaptureDeviceInput!
let captureSession = AVCaptureSession()
private lazy var videoOutput: AVCaptureVideoDataOutput! = {
let videoOutput = AVCaptureVideoDataOutput()
videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
videoOutput.alwaysDiscardsLateVideoFrames = true
return videoOutput
}()
var assetWriter: AVAssetWriter!
var videoAssetInput: AVAssetWriterInput!
var pixelBuffer: AVAssetWriterInputPixelBufferAdaptor!
var startTime: CMTime!
var endTime: CMTime!
var frameNumber: Int64 = 0
override func viewDidLoad() {
super.viewDidLoad()
self.view.backgroundColor = UIColor.white
imageView.backgroundColor = UIColor.groupTableViewBackground
imageView.contentMode = .scaleAspectFill
imageView.layer.masksToBounds = true
self.view.addSubview(imageView)
imageView.snp.makeConstraints { make in
make.edges.equalToSuperview()
}
self.view.layoutIfNeeded()
if setupSession() {
startSession()
}
let startButton = UIButton()
startButton.setTitle("startButton", for: .normal)
startButton.rx.controlEvent(.touchUpInside)
.asDriver()
.drive(onNext: { self.startRecording() })
.disposed(by: disposeBag)
self.view.addSubview(startButton)
startButton.snp.makeConstraints { make in
make.center.equalTo(self.view.snp.center)
}
let stopButton = UIButton()
stopButton.setTitle("stopButton", for: .normal)
stopButton.rx.controlEvent(.touchUpInside)
.asDriver()
.drive(onNext: { self.stopRecording() })
.disposed(by: disposeBag)
self.view.addSubview(stopButton)
stopButton.snp.makeConstraints { make in
make.centerX.equalTo(self.view.snp.centerX)
make.top.equalTo(startButton.snp.bottom).offset(30.0)
}
}
private func startRecording() {
let documentPath = NSHomeDirectory() + "/Documents/"
let filePath = documentPath + "video.mp4"
let fileURL = URL(fileURLWithPath: filePath)
let videoSettings = [
AVVideoWidthKey: 480,
AVVideoHeightKey: 640,
AVVideoCodecKey: AVVideoCodecType.h264
] as [String: Any]
videoAssetInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
pixelBuffer = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoAssetInput, sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)])
frameNumber = 0
do {
try assetWriter = AVAssetWriter(outputURL: fileURL, fileType: .mp4)
videoAssetInput.expectsMediaDataInRealTime = true
assetWriter.add(videoAssetInput)
assetWriter.startWriting()
assetWriter.startSession(atSourceTime: CMTime.zero)
print(#function)
} catch {
print(error)
}
}
private func stopRecording() {
if videoAssetInput == nil { return }
videoAssetInput.markAsFinished()
assetWriter.endSession(atSourceTime: endTime)
assetWriter.finishWriting {
self.videoAssetInput = nil
}
}
func setupSession() -> Bool {
captureSession.sessionPreset = AVCaptureSession.Preset.vga640x480
let camera = AVCaptureDevice.default(for: .video)
do {
let input = try AVCaptureDeviceInput(device: camera!)
if captureSession.canAddInput(input) {
captureSession.addInput(input)
}
} catch {
print(error)
return false
}
if captureSession.canAddOutput(videoOutput) {
captureSession.addOutput(videoOutput)
}
return true
}
func startSession() {
if !captureSession.isRunning {
DispatchQueue.main.async {
self.captureSession.startRunning()
print(#function)
}
}
}
func stopSession() {
if captureSession.isRunning {
DispatchQueue.main.async {
self.captureSession.stopRunning()
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
autoreleasepool {
connection.videoOrientation = AVCaptureVideoOrientation.portrait
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
return
}
let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)
let context = CIContext(options: nil)
let imageRef = context.createCGImage(cameraImage, from: cameraImage.extent)
let image = UIImage(cgImage: imageRef!)
DispatchQueue.main.async {
self.imageView.filter(_image: image)
guard
let videoAssetInput = self.videoAssetInput,
let displayedImage = self.imageView.image
else {
return
}
if !CMSampleBufferDataIsReady(sampleBuffer) {
return
}
if self.frameNumber == 0 {
self.startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
}
let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let frameTime = CMTimeSubtract(timestamp, self.startTime)
if videoAssetInput.isReadyForMoreMediaData {
if let pxBuffer: CVPixelBuffer = self.buffer(from: displayedImage) {
self.pixelBuffer.append(pxBuffer, withPresentationTime: frameTime)
}
self.frameNumber += 1
}
self.endTime = frameTime
}
}
}
func buffer(from image: UIImage) -> CVPixelBuffer? {
let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
var pixelBuffer : CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
guard (status == kCVReturnSuccess) else {
return nil
}
CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer!)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
context?.translateBy(x: 0, y: image.size.height)
context?.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context!)
image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
UIGraphicsPopContext()
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
}
Извините, мои коды все еще беспорядочные, и возможен сбой. Но дело не в этом ...