I built an inventory / equipment-management app that includes a QR scanner for checking equipment in and out. It scans QR codes that contain the auto-generated IDs of my equipment records (e.g. -M89hHDwvkK27pO7Vc63), and when a code is scanned I want to run code that updates the status and description in my Firebase database.
At the moment the QR scanner works: I can print the ID decoded from the QR code, and the segue after scanning is performed. However, the data does not appear to be updated in Firebase.
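In essence, what I want to happen on every scan is an update like the sketch below (just a sketch: the key and the new field values are placeholder examples taken from my data):

import FirebaseDatabase

// Sketch of the write I want to perform for a scanned key
// (the key and the values below are placeholders from my sample data).
let scannedKey = "-M89hHDwvkK27pO7Vc63"        // ID decoded from the QR code
Database.database().reference()
    .child("Equipment")
    .child(scannedKey)
    .updateChildValues([
        "status": "out",                        // toggled between "in" and "out"
        "description": "by A on 15/06/2020"     // who scanned it and on which date
    ])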
Firebase data:
{
  "Equipment" : {
    "-M89hHDwvkK27pO7Vc63" : {
      "cat" : "cat1",
      "description" : "by A on 25/05/2020",
      "id" : "Item6",
      "status" : "in"
    },
    "-M89j3U504GYlJQyytlu" : {
      "cat" : "cat1",
      "description" : "by A on 25/05/2020",
      "id" : "item7",
      "status" : "in"
    },
    "-M9bIgc7GAj8kkdmYzzn" : {
      "cat" : "cat2",
      "description" : "by A on 12/06/2020",
      "id" : "item10",
      "status" : "in"
    },
    "-M9nvM1tMNNoU7_PoZNb" : {
      "cat" : "cat3",
      "description" : "by A on 15/06/2020",
      "id" : "item1",
      "status" : "in"
    },
    "-M9o5EGRLyEWrI9rPtU8" : {
      "cat" : "cat3",
      "description" : "by B on 15/06/2020",
      "id" : "item 2",
      "status" : "in"
    }
  }
}
ViewController for the QR scanner:
class QRScannerViewController: UIViewController, AVCaptureMetadataOutputObjectsDelegate {

    @IBOutlet weak var previewView: UIView!

    var imageOrientation: AVCaptureVideoOrientation?
    var captureSession: AVCaptureSession?
    var videoPreviewLayer: AVCaptureVideoPreviewLayer?
    var capturePhotoOutput: AVCapturePhotoOutput?
    var ref: DatabaseReference?
    var databaseHandler: DatabaseHandle?
    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.

        // Get an instance of the AVCaptureDevice class to initialize a
        // device object and provide the video as the media type parameter
        guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
            fatalError("No video device found")
        }

        // Handler called when the orientation changes
        self.imageOrientation = AVCaptureVideoOrientation.portrait

        do {
            // Get an instance of the AVCaptureDeviceInput class using the previous device object
            let input = try AVCaptureDeviceInput(device: captureDevice)

            // Initialize the captureSession object
            captureSession = AVCaptureSession()
            // Set the input device on the capture session
            captureSession?.addInput(input)

            // Get an instance of the AVCapturePhotoOutput class
            capturePhotoOutput = AVCapturePhotoOutput()
            capturePhotoOutput?.isHighResolutionCaptureEnabled = true
            // Set the output on the capture session
            captureSession?.addOutput(capturePhotoOutput!)
            captureSession?.sessionPreset = .high

            // Initialize an AVCaptureMetadataOutput object and add it as an output to the capture session
            let captureMetadataOutput = AVCaptureMetadataOutput()
            captureSession?.addOutput(captureMetadataOutput)

            // Set the delegate and use the default dispatch queue to execute the callback
            captureMetadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            captureMetadataOutput.metadataObjectTypes = [AVMetadataObject.ObjectType.qr]

            // Initialise the video preview layer and add it as a sublayer to the previewView's layer
            videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession!)
            videoPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
            videoPreviewLayer?.frame = view.layer.bounds
            previewView.layer.addSublayer(videoPreviewLayer!)

            // Start video capture
            captureSession?.startRunning()
        } catch {
            // If any error occurs, simply print it out
            print(error)
            return
        }
    }
    override func viewWillAppear(_ animated: Bool) {
        navigationController?.setNavigationBarHidden(true, animated: false)
        self.captureSession?.startRunning()
    }

    // Find a camera with the specified AVCaptureDevice.Position, returning nil if one is not found
    func cameraWithPosition(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                 mediaType: AVMediaType.video,
                                                                 position: .unspecified)
        for device in discoverySession.devices {
            if device.position == position {
                return device
            }
        }
        return nil
    }
    func metadataOutput(_ captureOutput: AVCaptureMetadataOutput,
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
        // Check that the metadataObjects array contains at least one object
        if metadataObjects.count == 0 {
            return
        }

        //self.captureSession?.stopRunning()

        // Get the metadata object
        let metadataObj = metadataObjects[0] as! AVMetadataMachineReadableCodeObject
        if metadataObj.type == AVMetadataObject.ObjectType.qr {
            if let outputString = metadataObj.stringValue {
                DispatchQueue.main.async {
                    print(outputString)
                    // self.updatestatus(itemid: outputString)
                    let userEmail = Auth.auth().currentUser?.email
                    let userName = userEmail?.replacingOccurrences(of: "@email.com", with: "")
                    let date = Date()
                    let formatter = DateFormatter()
                    formatter.dateFormat = "dd/MM/yyyy"
                    let currentDate = formatter.string(from: date)
                    let description = "by " + userName! + " on " + currentDate

                    var statusdata = "-"
                    let ref = Database.database().reference().child("Equipment").child(outputString)
                    self.databaseHandler = ref.observe(.value, with: { (snapshot) in
                        let snapshotValue = snapshot.value as! [String: String]
                        print(snapshotValue["status"]!)
                        statusdata = snapshotValue["status"]! as String
                    })
                    if statusdata == "out" {
                        statusdata = "in"
                    } else {
                        statusdata = "out"
                    }
                    Database.database().reference().child("Equipment").child(outputString).updateChildValues(["status": statusdata, "description": description])
                }
                self.captureSession?.stopRunning()
                self.performSegue(withIdentifier: "backtohome", sender: self)
            }
        }
    }
}
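For reference, here is a minimal sketch (a hypothetical helper, not code that is currently in the project) of the read-then-write sequence performed entirely inside the snapshot callback, using observeSingleEvent(of:with:) instead of observe:

import FirebaseDatabase

// Sketch (hypothetical helper): toggle the status of the scanned item and
// update its description in one read-then-write sequence.
func toggleStatus(forKey scannedKey: String, description: String) {
    let ref = Database.database().reference().child("Equipment").child(scannedKey)
    // Read the node once; the closure runs asynchronously when the data arrives.
    ref.observeSingleEvent(of: .value, with: { snapshot in
        guard let value = snapshot.value as? [String: String],
              let currentStatus = value["status"] else { return }
        // Toggle "in" <-> "out" based on the value that was actually read...
        let newStatus = (currentStatus == "out") ? "in" : "out"
        // ...and only then write the new status and description back.
        ref.updateChildValues(["status": newStatus, "description": description])
    })
}

The idea behind observeSingleEvent here is that the closure fires once with the current value, and the write is only issued after that value is available.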
Thanks in advance for your help!