I don't get depthData when I take the first photo on an iPhone X.
Camera permissions are granted.
If I run the code on an iPhone X and take a photo, there is no depth data; if I tap again, I do get depth data. If I add a delay before calling capturePhoto, I get depthData on the first shot too.
If I run the code on an iPhone XS and take a photo, I get depth data right away.
Am I missing something in my code?
import UIKit
import AVFoundation
import os.log

extension OSLog {
    /// The subsystem for the app
    public static var appSubsystem = "com.my.app"
    /// Image processing category
    static let imageService = OSLog(subsystem: OSLog.appSubsystem, category: "Image")
}
/**
 Use this as a logging system; it is compiled out for release builds for now.
 */
func DLog(_ string: String, subsystem: OSLog, type: OSLogType = .debug) {
    #if DEBUG
    os_log("%{PUBLIC}@", log: subsystem, type: type, string)
    #endif
}
class ViewController: UIViewController {

    var photoProcessingQueue = DispatchQueue(label: "ProcessingQueue", attributes: [], autoreleaseFrequency: .workItem)
    var photoCaptureOutput: AVCapturePhotoOutput!
    var photoSession: AVCaptureSession!
    var initalized: Bool = false

    func setupCaptureSession() {
        defer {
            photoSession.commitConfiguration()
        }
        photoCaptureOutput = AVCapturePhotoOutput()
        photoSession = AVCaptureSession()
        DLog("setupCaptureSession \(String(describing: Thread.current))", subsystem: .imageService)
        photoSession.beginConfiguration()
        photoSession.sessionPreset = .photo
        do {
            // Use the rear dual camera, which supports depth capture on these devices.
            guard let defaultCaptureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) else {
                DLog("can't find camera", subsystem: .imageService, type: .error)
                return
            }
            let videoDeviceInput = try AVCaptureDeviceInput(device: defaultCaptureDevice)
            if photoSession.canAddInput(videoDeviceInput) {
                photoSession.addInput(videoDeviceInput)
            } else {
                DLog("can't add camera", subsystem: .imageService, type: .error)
                return
            }
            if photoSession.canAddOutput(photoCaptureOutput) {
                photoSession.addOutput(photoCaptureOutput)
                photoCaptureOutput.isHighResolutionCaptureEnabled = true
                // Enable depth delivery on the output only when it is supported.
                photoCaptureOutput.isDepthDataDeliveryEnabled = photoCaptureOutput.isDepthDataDeliverySupported
                photoCaptureOutput.maxPhotoQualityPrioritization = .quality
            } else {
                DLog("Could not add photo output to the session", subsystem: .imageService, type: .error)
            }
        } catch {
            DLog("general camera error: \(error)", subsystem: .imageService, type: .error)
        }
    }
    @IBAction func takePhoto(sender: UIButton) {
        photoProcessingQueue.async {
            if !self.initalized {
                self.setupCaptureSession()
                self.initalized = true
            }
            DLog("Photo session running: \(self.photoSession.isRunning) \(String(describing: Thread.current))", subsystem: .imageService)
            self.photoSession.startRunning()
            DLog("Photo session running: \(self.photoSession.isRunning) \(String(describing: Thread.current))", subsystem: .imageService)

            let photoSettings: AVCapturePhotoSettings
            if self.photoCaptureOutput.availablePhotoCodecTypes.contains(.jpeg) {
                photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
            } else {
                photoSettings = AVCapturePhotoSettings()
            }
            photoSettings.flashMode = .auto
            photoSettings.isDepthDataDeliveryEnabled = self.photoCaptureOutput.isDepthDataDeliverySupported
            photoSettings.photoQualityPrioritization = .quality

            // BUG: iPhone X returns no depthData without a delay here...
            //Thread.sleep(forTimeInterval: 0.1)

            // Actually capture the photo
            self.photoCaptureOutput.capturePhoto(with: photoSettings, delegate: self)
        }
    }
}
extension ViewController: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        DLog("photo.depthData?.depthDataMap: \(photo.depthData?.depthDataMap)", subsystem: .imageService)
    }
}
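For reference, this is the delay workaround spelled out as its own action inside ViewController. It is only a sketch: takePhotoDelayed is just a name for this variant, the settings are simplified compared to takePhoto above, and 0.1 s is simply the value that happened to work for me on the iPhone X, not something I consider a proper fix.

// Sketch of the delay workaround only: defer the first capture with asyncAfter
// instead of blocking the processing queue with Thread.sleep.
// The 0.1 s value is just what I happened to try; it is not a documented threshold.
@IBAction func takePhotoDelayed(sender: UIButton) {
    photoProcessingQueue.async {
        if !self.initalized {
            self.setupCaptureSession()
            self.initalized = true
        }
        self.photoSession.startRunning()

        self.photoProcessingQueue.asyncAfter(deadline: .now() + 0.1) {
            // Same configuration as in takePhoto above, just without the codec check.
            let photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
            photoSettings.flashMode = .auto
            photoSettings.isDepthDataDeliveryEnabled = self.photoCaptureOutput.isDepthDataDeliverySupported
            photoSettings.photoQualityPrioritization = .quality
            self.photoCaptureOutput.capturePhoto(with: photoSettings, delegate: self)
        }
    }
}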