I am building a barcode scanning app and have run into a problem: only high-quality barcodes get recognized. If the label is even slightly creased or crumpled, the code is no longer read. Can you tell me how to solve this?
My code is below:
import UIKit
import AVFoundation
import AudioToolbox
import CoreData

class ScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var infoLbl: UILabel!
    @IBOutlet weak var barcodeButton1: UIButton!
    @IBOutlet weak var blueButton: UIButton!

    let systemSoundId: SystemSoundID = 1016

    // captureSession manages capture activity and coordinates between the input device and the capture outputs
    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?

    // Empty rectangle with a red border to outline the detected QR code or barcode
    let codeFrame: UIView = {
        let codeFrame = UIView()
        codeFrame.layer.borderColor = UIColor.red.cgColor
        codeFrame.layer.borderWidth = 1.5
        codeFrame.frame = CGRect.zero
        codeFrame.translatesAutoresizingMaskIntoConstraints = false
        return codeFrame
    }()

    override func viewDidDisappear(_ animated: Bool) {
        super.viewDidDisappear(animated)
        captureSession?.stopRunning()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        captureSession?.startRunning()
    }

    func toggleTorch(on: Bool) {
        guard
            let device = AVCaptureDevice.default(for: AVMediaType.video),
            device.hasTorch
        else { return }

        do {
            try device.lockForConfiguration()
            device.torchMode = on ? .on : .off
            device.unlockForConfiguration()
        } catch {
            print("Torch could not be used")
        }
    }

    var lightOnTouch = false

    @IBAction func lightingOn(_ sender: Any) {
        if !lightOnTouch {
            toggleTorch(on: true)
            lightOnTouch = true
        } else {
            toggleTorch(on: false)
            lightOnTouch = false
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()

        view?.backgroundColor = UIColor(white: 1, alpha: 0.5)
        view?.backgroundColor = UIColor.black.withAlphaComponent(0.5)

        // AVCaptureDevice lets us reference a physical capture device (video in our case)
        let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)

        if let captureDevice = captureDevice {
            do {
                /*
                // Start of autofocus
                let w = UIScreen.main.bounds.width
                let h = UIScreen.main.bounds.height
                try! captureDevice.lockForConfiguration()
                if captureDevice.isFocusPointOfInterestSupported {
                    // Focus on a point
                    captureDevice.focusPointOfInterest = CGPoint(x: w / 2, y: h / 2)
                    captureDevice.focusMode = AVCaptureDevice.FocusMode.autoFocus
                }
                if captureDevice.isExposurePointOfInterestSupported {
                    // Expose on a point
                    captureDevice.exposurePointOfInterest = CGPoint(x: w / 2, y: h / 2)
                    captureDevice.exposureMode = AVCaptureDevice.ExposureMode.autoExpose
                }
                captureDevice.unlockForConfiguration()
                // End of autofocus
                */

                captureSession = AVCaptureSession()

                // The capture session needs an input to capture data from
                let input = try AVCaptureDeviceInput(device: captureDevice)
                captureSession?.addInput(input)

                // The capture session needs an output to transfer data to
                let captureMetadataOutput = AVCaptureMetadataOutput()
                captureSession?.addOutput(captureMetadataOutput)

                // Tell the output which metadata types we expect
                captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
                captureMetadataOutput.metadataObjectTypes = [.code128, .ean13, .ean8, .code39, .upce, .code93, .code39Mod43, .itf14] // AVMetadataObject.ObjectType

                captureSession?.startRunning()

                // The videoPreviewLayer displays the video feed of the capture session
                videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
                videoPreviewLayer?.videoGravity = .resizeAspectFill
                videoPreviewLayer?.frame = view.layer.bounds
                view.layer.addSublayer(videoPreviewLayer!)

                view.bringSubviewToFront(infoLbl)
                view.bringSubviewToFront(barcodeButton1)
            } catch {
                print("Error: \(error)")
            }
        }
    }

    // metadataOutput(_:didOutput:from:) informs the delegate (this ScannerViewController) that the capture output emitted new metadata objects
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        if metadataObjects.count == 0 {
            print("no objects returned")
            return
        }

        guard
            let metaDataObject = metadataObjects[0] as? AVMetadataMachineReadableCodeObject,
            let stringCodeValue = metaDataObject.stringValue
        else {
            return
        }

        view.addSubview(codeFrame)

        // transformedMetadataObject converts the detected object into preview-layer coordinates
        guard let metaDataCoordinates = videoPreviewLayer?.transformedMetadataObject(for: metaDataObject) else {
            return
        }

        // Those coordinates are assigned to our codeFrame
        codeFrame.frame = metaDataCoordinates.bounds

        AudioServicesPlayAlertSound(systemSoundId)
        infoLbl.text = stringCodeValue

        if URL(string: stringCodeValue) != nil {
            performSegue(withIdentifier: "seg1", sender: self)
            captureSession?.stopRunning()
        }
    }

    override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
        if let nextVC = segue.destination as? DetailsViewController {
            nextVC.scannedCode = infoLbl.text
        }
    }
}
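Two things I have been thinking about trying, though I do not know whether either actually helps with creased labels: re-enabling the focus block with continuous autofocus (I suspect my commented-out version is wrong anyway, since focusPointOfInterest expects normalized 0...1 coordinates rather than screen points), and limiting rectOfInterest so the metadata output only scans a narrow strip of the frame. Below is a rough, untested sketch; the tuneScanner name is made up, and device, output and previewLayer are meant to be the same captureDevice, captureMetadataOutput and videoPreviewLayer from viewDidLoad above:

// Rough, untested sketch: continuous autofocus plus a restricted scan area.
private func tuneScanner(device: AVCaptureDevice,
                         output: AVCaptureMetadataOutput,
                         previewLayer: AVCaptureVideoPreviewLayer) {
    do {
        try device.lockForConfiguration()
        // Point of interest is given in normalized (0...1) coordinates,
        // not in screen points like in my commented-out block.
        if device.isFocusPointOfInterestSupported, device.isFocusModeSupported(.continuousAutoFocus) {
            device.focusPointOfInterest = CGPoint(x: 0.5, y: 0.5)
            device.focusMode = .continuousAutoFocus
        }
        if device.isExposurePointOfInterestSupported, device.isExposureModeSupported(.continuousAutoExposure) {
            device.exposurePointOfInterest = CGPoint(x: 0.5, y: 0.5)
            device.exposureMode = .continuousAutoExposure
        }
        // A modest zoom so a small barcode fills more of the frame.
        device.videoZoomFactor = min(2.0, device.activeFormat.videoMaxZoomFactor)
        device.unlockForConfiguration()
    } catch {
        print("Could not configure the capture device: \(error)")
    }

    // Scan only a horizontal strip in the middle of the preview.
    // I have read this should be done after startRunning(), otherwise
    // the converted rect can come back empty.
    let scanArea = CGRect(x: 0, y: view.bounds.midY - 100,
                          width: view.bounds.width, height: 200)
    output.rectOfInterest = previewLayer.metadataOutputRectConverted(fromLayerRect: scanArea)
}

The idea behind rectOfInterest is simply that the detector only has to look at the strip where I hold the barcode; I do not know how much that helps with damaged labels.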
This one reads:
This one does not read:
Let's put it this way: 10% of the products in a grocery store do not scan.
Please help me.
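P.S. If AVCaptureMetadataOutput simply cannot cope with damaged codes, I would also consider switching to the Vision framework and running VNDetectBarcodesRequest on raw frames from an AVCaptureVideoDataOutput. Below is a rough, untested sketch of what I have in mind; the VisionScannerViewController name is made up, and the label/segue handling from my current controller is left out:

import UIKit
import AVFoundation
import Vision

// Rough, untested sketch: decode barcodes with Vision instead of AVCaptureMetadataOutput.
class VisionScannerViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    private let captureSession = AVCaptureSession()
    private let videoOutput = AVCaptureVideoDataOutput()
    private let sampleQueue = DispatchQueue(label: "barcode.sample.queue")

    override func viewDidLoad() {
        super.viewDidLoad()

        guard
            let device = AVCaptureDevice.default(for: .video),
            let input = try? AVCaptureDeviceInput(device: device),
            captureSession.canAddInput(input),
            captureSession.canAddOutput(videoOutput)
        else { return }

        captureSession.addInput(input)
        videoOutput.setSampleBufferDelegate(self, queue: sampleQueue)
        captureSession.addOutput(videoOutput)

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.videoGravity = .resizeAspectFill
        previewLayer.frame = view.layer.bounds
        view.layer.addSublayer(previewLayer)

        captureSession.startRunning()
    }

    // Called for every captured frame; each frame is handed to Vision.
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        let request = VNDetectBarcodesRequest { request, error in
            guard
                error == nil,
                let results = request.results as? [VNBarcodeObservation],
                let payload = results.first?.payloadStringValue
            else { return }

            DispatchQueue.main.async {
                // This is where I would update infoLbl and trigger the segue,
                // the way I currently do in metadataOutput(_:didOutput:from:).
                print("Scanned: \(payload)")
            }
        }

        // .right matches a portrait UI with the back camera (my assumption).
        let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer,
                                            orientation: .right,
                                            options: [:])
        try? handler.perform([request])
    }
}

Has anyone compared the two approaches on low-quality or creased barcodes?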