Swift · получить ExposureTime из sampleBuffer - PullRequest
0 голосов
/ 20 апреля 2020

Я пытаюсь получить доступ к времени экспозиции из выборочного буфера. В этом примере я пытаюсь напечатать его при нажатии кнопки photoBtnPressed.

Я попробовал этот код, но он не работает.

print(sampleBuffer!.attachments(kCGImagePropertyExifExposureTime))

Как вы можете видеть в моем коде, я также попробовал код QitVision:

let current_exposure_duration : CMTime = (captureDevice?.exposureDuration)!

Когда я распечатаю весь sampleBuffer, я получу эти точные значения выдержки времени и ISO:

[…]    
ISOSpeedRatings: 100, […] ExposureTime = "0.0303030303030303" 
[…]

Из текущего реализованного кода, основанного на коде QitVision

ISO: 102.745224, ExposureTime: 0.02998

Это мой код, основанный на примере mauricioconde :

import AVKit
import ImageIO
import UIKit

class ViewController: UIViewController, UINavigationControllerDelegate, UIImagePickerControllerDelegate {


    @IBOutlet weak var photoPreviewImageView: UIImageView!

    /// Most recent frame delivered by the video data output.
    /// Written on `videoDataOutputQueue`, read on the main thread in
    /// `photoBtnPressed`. NOTE(review): access is not synchronized —
    /// consider confining reads/writes to one queue.
    var sampleBuffer: CMSampleBuffer!

    var previewView : UIView!
    var boxView:UIView!
    let myButton: UIButton = UIButton()

    // Camera capture required properties
    var videoDataOutput: AVCaptureVideoDataOutput!
    var videoDataOutputQueue: DispatchQueue!
    var previewLayer:AVCaptureVideoPreviewLayer!
    var captureDevice : AVCaptureDevice!
    let session = AVCaptureSession()




    override func viewDidLoad() {
        super.viewDidLoad()

        self.setupAVCapture()
    }



    /// Prints the exposure metadata of the most recent frame.
    ///
    /// The per-frame EXIF values live in the sample buffer's attachments;
    /// `captureDevice.exposureDuration` / `.iso` only reflect the device's
    /// *current* settings, which can differ slightly from the frame that
    /// was actually captured (as the question's output shows).
    @IBAction func photoBtnPressed(_ sender: UIButton) {

        // Guard instead of force-unwrapping: the button can be tapped
        // before the first frame arrives or before setup succeeded.
        guard let sampleBuffer = sampleBuffer, let captureDevice = captureDevice else {
            print("No frame captured yet")
            return
        }

        // Per-frame EXIF metadata attached to the sample buffer.
        if let attachments = CMCopyDictionaryOfAttachments(allocator: kCFAllocatorDefault,
                                                           target: sampleBuffer,
                                                           attachmentMode: kCMAttachmentMode_ShouldPropagate) as? [String: Any],
           let exif = attachments[kCGImagePropertyExifDictionary as String] as? [String: Any] {
            print("EXIF ExposureTime:", exif[kCGImagePropertyExifExposureTime as String] ?? "n/a")
            print("EXIF ISOSpeedRatings:", exif[kCGImagePropertyExifISOSpeedRatings as String] ?? "n/a")
        }

        // Current device settings (may lag or lead the captured frame).
        print("Device exposure (s):", CMTimeGetSeconds(captureDevice.exposureDuration))
        print("Device ISO:", captureDevice.iso)
    }


    /// Disallow autorotation in landscape (and unknown) orientations.
    override var shouldAutorotate: Bool {
        switch UIDevice.current.orientation {
        case .landscapeLeft, .landscapeRight, .unknown:
            return false
        default:
            return true
        }
    }

}


// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate and capture setup
extension ViewController:  AVCaptureVideoDataOutputSampleBufferDelegate{


    /// Picks the back wide-angle camera and starts session configuration.
    func setupAVCapture(){
        session.sessionPreset = .vga640x480
        guard let device = AVCaptureDevice
            .default(.builtInWideAngleCamera,
                     for: .video,
                     position: .back) else {
                        print("error: no back wide-angle camera available")
                        return
        }
        captureDevice = device

        beginSession()
    }

    /// Wires the device input, video data output and preview layer,
    /// then starts the session.
    func beginSession(){
        do {
            // `try` either throws or returns a non-nil input, so the
            // original post-`try` nil check was dead code and is removed.
            let deviceInput = try AVCaptureDeviceInput(device: captureDevice)

            if self.session.canAddInput(deviceInput){
                self.session.addInput(deviceInput)
            }

            videoDataOutput = AVCaptureVideoDataOutput()
            // Drop late frames rather than queueing them up.
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
            videoDataOutput.setSampleBufferDelegate(self, queue: self.videoDataOutputQueue)

            if session.canAddOutput(self.videoDataOutput){
                session.addOutput(self.videoDataOutput)
            }

            videoDataOutput.connection(with: .video)?.isEnabled = true

            previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
            previewLayer.videoGravity = .resizeAspect

            let rootLayer: CALayer = self.photoPreviewImageView.layer
            rootLayer.masksToBounds = true
            previewLayer.frame = rootLayer.bounds
            rootLayer.addSublayer(self.previewLayer)

            session.startRunning()
        } catch {
            print("error: \(error.localizedDescription)")
        }
    }

    /// Keeps a reference to the latest frame so its attachments (EXIF
    /// exposure time, ISO) can be inspected later.
    /// Called on `videoDataOutputQueue`, not the main thread.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        self.sampleBuffer = sampleBuffer
    }

    // Clean up AVCapture.
    func stopCamera(){
        session.stopRunning()
    }

}

Я использую Xcode 11.4. Спасибо за вашу помощь!

...