I am working on an app that needs AVCameraCalibrationData. My app crashes with the following exception:

AVCaptureDataOutputSynchronizer initWithDataOutputs:] Unsupported AVCaptureOutput in dataOutputs - <AVCapturePhotoOutput: 0x283d6ab80>'

I have tried other workarounds, but depthDataOutput is never called. Please take a look at my camera configuration. Any help would be greatly appreciated.
import UIKit
import AVFoundation

class ViewController: UIViewController {

    @IBOutlet var image_view: UIImageView!
    @IBOutlet var capture_button: UIButton!

    var captureSession: AVCaptureSession?
    var sessionOutput: AVCapturePhotoOutput?
    var depthOutput: AVCaptureDepthDataOutput?
    var previewLayer: AVCaptureVideoPreviewLayer?
    var outputSynchronizer: AVCaptureDataOutputSynchronizer?

    let dataOutputQueue = DispatchQueue(label: "data queue",
                                        qos: .userInitiated,
                                        attributes: [],
                                        autoreleaseFrequency: .workItem)

    @IBAction func capture(_ sender: Any) {
        self.sessionOutput?.capturePhoto(with: AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg]),
                                         delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        self.previewLayer?.removeFromSuperlayer()
        self.image_view.image = UIImage(data: photo.fileDataRepresentation()!)
        print(photo.cameraCalibrationData)            // is nil
        let depth_map = photo.depthData?.depthDataMap
        print("depth_map:", depth_map)                // is nil
    }

    func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
        print("depth data")                           // never called
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        self.captureSession = AVCaptureSession()
        self.captureSession?.sessionPreset = .photo

        self.sessionOutput = AVCapturePhotoOutput()
        self.depthOutput = AVCaptureDepthDataOutput()
        self.depthOutput?.setDelegate(self, callbackQueue: dataOutputQueue)

        do {
            let device = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back)
            let input = try AVCaptureDeviceInput(device: device!)
            if (self.captureSession?.canAddInput(input))! {
                self.captureSession?.addInput(input)
                if (self.captureSession?.canAddOutput(self.sessionOutput!))! {
                    self.captureSession?.addOutput(self.sessionOutput!)
                    if (self.captureSession?.canAddOutput(self.depthOutput!))! {
                        self.captureSession?.addOutput(self.depthOutput!)

                        self.previewLayer = AVCaptureVideoPreviewLayer(session: self.captureSession!)
                        self.previewLayer?.frame = self.image_view.bounds
                        self.previewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
                        self.previewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
                        self.image_view.layer.addSublayer(self.previewLayer!)
                    }
                }
            }

            if sessionOutput!.isDepthDataDeliverySupported {
                sessionOutput?.isDepthDataDeliveryEnabled = true
                depthOutput?.connection(with: .depthData)!.isEnabled = true
                depthOutput?.isFilteringEnabled = true
                outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [sessionOutput!, depthOutput!]) // app crashes on this line
                outputSynchronizer!.setDelegate(self, queue: self.dataOutputQueue)
            }
        } catch {}

        self.captureSession?.startRunning()
    }
}
@available(iOS 11.0, *)
extension ViewController: AVCaptureDataOutputSynchronizerDelegate, AVCaptureDepthDataOutputDelegate, AVCapturePhotoCaptureDelegate {

    @available(iOS 11.0, *)
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
    }
}
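
From the exception message it looks like AVCaptureDataOutputSynchronizer only accepts data outputs (video, depth, audio, metadata), not an AVCapturePhotoOutput. Below is a minimal sketch of the direction I am considering instead, meant to slot into viewDidLoad and the capture action above. videoOutput is a new name I introduce here, and the per-photo settings part is only my reading of the docs, not something I have verified:

// Sketch only: use a video data output in the synchronizer instead of the photo output.
// videoOutput is new; everything else matches the configuration above.
let videoOutput = AVCaptureVideoDataOutput()
if captureSession!.canAddOutput(videoOutput) {
    captureSession!.addOutput(videoOutput)
}

// Synchronize the video and depth data outputs rather than the photo output.
outputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoOutput, depthOutput!])
outputSynchronizer!.setDelegate(self, queue: dataOutputQueue)

// For the still capture, request depth (and calibration data, if supported) per photo;
// my understanding is that the plain JPEG settings above leave photo.depthData nil.
let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
settings.isDepthDataDeliveryEnabled = sessionOutput!.isDepthDataDeliveryEnabled
if sessionOutput!.isCameraCalibrationDataDeliverySupported {
    settings.isCameraCalibrationDataDeliveryEnabled = true
}
sessionOutput!.capturePhoto(with: settings, delegate: self)

Is pairing a video data output with the depth output in the synchronizer, and requesting depth/calibration through AVCapturePhotoSettings, the intended way to get AVCameraCalibrationData for a still photo? (I am also not sure whether the delegate I set on depthOutput in viewDidLoad conflicts with the synchronizer.)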