The QR code bounds are slightly offset on some devices

I am using AVFoundation to detect a QR code. I detect the QR code and draw a rectangle, with an animation, at the bounds I get back from the delegate method. On iPhone XS and XS Max devices the box can end up off-center.

The way I use AVFoundation is this: I detect the QR code, and through the delegate method I send the bounds of the metadataObject rectangle to my view controller. When those bounds are inside my targetView, I stop my captureSession and draw a rectangle at metadataObject.bounds. On iPhone XS and XS Max the box I get is off-center. I believe this is because these devices are very fast, and between the detection and captureSession.stopRunning() a few more frames are processed, so the result that gets drawn is no longer centered. Any suggestions? Would it be preferable to simply capture an image once the QR code is inside the targetView, then scan that still image for the QR code and draw the box around it?
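
For reference, here is a rough sketch of that still-image idea: scan a captured UIImage with CIDetector and take the bounds from the resulting CIQRCodeFeature. The detectQRCode helper is hypothetical and not part of my current code:

import CoreImage
import UIKit

// Hypothetical helper, not in the project code below: scans a still image for a
// QR code and returns the decoded string plus the feature bounds. The bounds are
// in Core Image coordinates, i.e. the origin is at the bottom-left of the image.
func detectQRCode(in image: UIImage) -> (value: String, bounds: CGRect)? {
    guard let ciImage = CIImage(image: image),
        let detector = CIDetector(ofType: CIDetectorTypeQRCode,
                                  context: nil,
                                  options: [CIDetectorAccuracy: CIDetectorAccuracyHigh]),
        let feature = detector.features(in: ciImage).first as? CIQRCodeFeature,
        let message = feature.messageString else {
            return nil
    }
    return (message, feature.bounds)
}

The returned bounds would still have to be flipped and scaled into the preview view's coordinate space before drawing the box.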

In my CameraControllerQRCode:

import AVFoundation
import UIKit

protocol CameraControllerQRCodeDelegate {
    func qrCodefound(qrCodeValue: String, bounds: CGRect)
}

class CameraControllerQRCode :NSObject{

    //MARK: - Properties

    var delegate : CameraControllerQRCodeDelegate?

    var captureSession : AVCaptureSession?

    var frontCamera: AVCaptureDevice?
    var rearCamera : AVCaptureDevice?

    var currentCameraPosition : CameraPosition?
    var frontCameraInput : AVCaptureDeviceInput?
    var rearCameraInput : AVCaptureDeviceInput?

    var rearCaptureInput : AVCaptureInput?
    var captureOutput : AVCaptureOutput?

    var photoOutput : AVCapturePhotoOutput?
    var previewLayer : AVCaptureVideoPreviewLayer?

    var connection : AVCaptureConnection?

    enum CameraControllerError : Swift.Error{
        case captureSessionAlreadyRunning
        case captureSessionIsMissing
        case inputsAreInvalid
        case outputsAreInvalid
        case invalidOperation
        case noCamerasAvailable
        case torchCouldNotBeUsed
        case unableToFocus
        case unknown
    }

    public enum CameraPosition {
        case front
        case rear
    }



    func prepare(completionHandler : @escaping (Error?) -> ()){

        func createCaptureSession(){
            self.captureSession = AVCaptureSession()
        }

        func configureCaptureDevices() throws {

            let session = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: .video, position: .back)

            let cameras = session.devices
            if cameras.isEmpty { throw CameraControllerError.noCamerasAvailable }

            for camera in cameras{

                print(camera.deviceType.rawValue)
                if camera.position == .front {
                    self.frontCamera = camera
                }

                if camera.position == .back {
                    self.rearCamera = camera

                    try camera.lockForConfiguration()
                    camera.activeVideoMaxFrameDuration = CMTimeMake(value: 1, timescale: 30)
                    camera.focusMode = .continuousAutoFocus
                    camera.unlockForConfiguration()
                    print(camera.activeVideoMaxFrameDuration)
                }
            }

        }

        func configureDeviceInputs() throws {
            guard let captureSession = self.captureSession else { throw CameraControllerError.captureSessionIsMissing}

            if let rearCamera = self.rearCamera {
                self.rearCameraInput = try AVCaptureDeviceInput(device: rearCamera)
                self.rearCaptureInput = try AVCaptureDeviceInput(device: rearCamera)

                if captureSession.canAddInput(self.rearCameraInput!){
                    captureSession.addInput(self.rearCameraInput!)
                }else{
                    throw CameraControllerError.inputsAreInvalid
                }

                self.currentCameraPosition = .rear

            }else if let frontCamera = self.frontCamera {
                self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)

                if captureSession.canAddInput(self.frontCameraInput!){
                    captureSession.addInput(self.frontCameraInput!)
                }else{
                    throw CameraControllerError.inputsAreInvalid
                }
                self.currentCameraPosition = .front
            }else{
                throw CameraControllerError.noCamerasAvailable
            }
        }

        func configurePhotoOutput() throws {
            guard let captureSession = self.captureSession else {
                throw CameraControllerError.captureSessionIsMissing
            }

            //metadataOutput

            let metadataOutput = AVCaptureMetadataOutput()


            if captureSession.canAddOutput(metadataOutput){
                captureSession.addOutput(metadataOutput)
                metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
                metadataOutput.metadataObjectTypes = [.qr]
            }else{
                throw CameraControllerError.outputsAreInvalid
            }


            captureSession.startRunning()
        }

        DispatchQueue(label: "prepare").async {
            do{
                createCaptureSession()
                try configureCaptureDevices()
                try configureDeviceInputs()
                try configurePhotoOutput()
            }catch{
                DispatchQueue.main.async {
                    completionHandler(error)
                }
                return
            }

            DispatchQueue.main.async {
                completionHandler(nil)
            }

        }

    }

    func displayPreview(on view : UIView) throws {
        guard let captureSession = self.captureSession,captureSession.isRunning else {
            throw CameraControllerError.captureSessionIsMissing
        }

        self.previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        guard let previewLayer = self.previewLayer else {
            throw CameraControllerError.unknown
        }
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.connection?.videoOrientation = .portrait
        view.layer.insertSublayer(previewLayer, at: 0)
        // The preview layer is a sublayer of view.layer, so size it with view.bounds, not view.frame.
        previewLayer.frame = view.bounds

    }
}
extension CameraControllerQRCode : AVCaptureMetadataOutputObjectsDelegate{

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        print("QRCode Found")
        guard let captureSession = captureSession ,captureSession.isRunning else {
            print("CaptureSession nil")
            return
        }

        guard !metadataObjects.isEmpty else {
            return
        }

        guard let metadataObject = metadataObjects[0] as? AVMetadataMachineReadableCodeObject else {
            print("Error Delegate method 1")
            return
        }
        // metadataObject.bounds is in normalized metadata-output coordinates;
        // convert it into the preview layer's coordinate space before using it on screen.
        let qrCodeBounds = metadataObject.bounds

        guard let QRCodeRect = previewLayer?.layerRectConverted(fromMetadataOutputRect: qrCodeBounds) else {
            return
        }
        guard let stringValue = metadataObject.stringValue else {
            return
        }
        print(stringValue)
        guard let delegate = delegate else {
            print("Delegate : nil")
            return
        }


        delegate.qrCodefound(qrCodeValue: stringValue, bounds: QRCodeRect)


    }
}

In my view controller:

extension QRScannerController : CameraControllerQRCodeDelegate{
    func qrCodefound(qrCodeValue: String, bounds: CGRect) {


        if targetView.frame.contains(bounds){
            print("In to the IF")
            // stopRunning() is dispatched asynchronously onto a newly created queue,
            // so a few more frames can still be delivered before the session actually stops.
            DispatchQueue(label: "prepare").async {
                self.cameraControllerQRCode.captureSession?.stopRunning()
            }
            self.scaningline.removeFromSuperview()
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            self.messageLabel.text = qrCodeValue


            let bezierPath = UIBezierPath(rect: bounds)
            let shapeLayer = CAShapeLayer()
            shapeLayer.fillColor = UIColor.clear.cgColor
            shapeLayer.strokeColor = UIColor.red.cgColor
            shapeLayer.lineWidth = 2

            shapeLayer.path = bezierPath.cgPath
            let animation = CABasicAnimation(keyPath: "strokeEnd")
            animation.fromValue = 0.0
            animation.toValue = 1.0
            animation.duration = 0.5
            self.view.layer.addSublayer(shapeLayer)
            shapeLayer.add(animation, forKey: "drawLineAnimation")


        }

    }
}
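
Another variant I am considering, sketched below: keep only the first result that lands inside the targetView and ignore every later callback, so the extra frames delivered before stopRunning() actually takes effect cannot move the box. QRBoxLocker is a made-up helper, not something in my project:

import CoreGraphics

// Hypothetical helper: remembers the first bounds that fall inside the target frame
// and rejects every later (possibly shifted) delegate callback.
final class QRBoxLocker {
    private(set) var lockedBounds: CGRect?

    // Returns true only for the first bounds contained in targetFrame.
    func lockIfNeeded(_ bounds: CGRect, in targetFrame: CGRect) -> Bool {
        guard lockedBounds == nil, targetFrame.contains(bounds) else { return false }
        lockedBounds = bounds
        return true
    }
}

In qrCodefound I would then stop the session and draw the rectangle only when lockIfNeeded(_:in:) returns true, instead of reacting to every callback.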

Any suggestions?

Thank you very much.
