I am trying to update the text of a TextField after the variable `scannedLocation` is set to `stringValue` inside the BarcodeScanner class near the bottom of the code (the TextField is marked with a comment in the code below).
Essentially, I scan a barcode with the camera, set the TextField's text to the decoded value (`stringValue`), and then expect the TextField to refresh automatically. Right now, when a barcode is scanned, `stringValue` does change, but the TextField never updates (the assignment to `scannedLocation` is also marked with a comment).
Any help would be much appreciated! I originally had the TextField as a plain Text view, so if anyone knows how to make this work with Text instead of TextField, that would be great too.
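I suspect the fix involves something along these lines, though I am not sure how to connect it to my scanner code below (this is only a rough sketch; ScannerModel and LocationView are placeholder names I made up, not code from my project):

import SwiftUI
import Combine

// An observable object whose @Published property should push changes into any observing view.
final class ScannerModel: ObservableObject {
    @Published var scannedLocation: String = ""
}

struct LocationView: View {
    @ObservedObject var model: ScannerModel

    var body: some View {
        VStack {
            // A plain Text re-renders whenever scannedLocation changes...
            Text("Location Code: \(model.scannedLocation)")
            // ...and a TextField can bind to the same property directly.
            TextField("Location", text: $model.scannedLocation)
        }
    }
}

Here is my actual code: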
import SwiftUI
import AVFoundation
var scannedLocation = "" // global that BarcodeScanner writes the decoded value into
struct FirstView: View {
    @State var RetrievedLocation: String = "Location Code: \(scannedLocation)"
    @Environment(\.presentationMode) var presentationMode

    var body: some View {
        NavigationView {
            VStack {
                setupScannerProcess()
                    .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .top)
                    .background(Color.green)
                HStack {
                    // This is the TextField I want to refresh when a barcode is scanned.
                    TextField("", text: $RetrievedLocation)
                        .multilineTextAlignment(.center)
                        .navigationBarTitle(Text("Scan Location"), displayMode: .inline)
                        .frame(minWidth: 0, maxWidth: .infinity, minHeight: 0, maxHeight: 50, alignment: .center)
                        .border(Color.gray, width: 1)
                        .padding(EdgeInsets(top: -8, leading: 0, bottom: -8, trailing: -8))
                }
                // View details
                .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .top)
                // Navigation bar
                .navigationBarItems(leading: // Left side
                    Button(action: {
                        self.RetrievedLocation = "Location Code: \(scannedLocation)"
                    }) {
                        HStack {
                            Image(systemName: "chevron.left").imageScale(.large)
                            Text("Change String")
                        }
                    }, trailing: // Right side
                    Button(action: {
                        self.addRow() // addRow() is defined elsewhere in my project (omitted here)
                    }) {
                        HStack {
                            Text("Add List Item")
                            Image(systemName: "chevron.right").imageScale(.large)
                        }
                    })
            }
        }
    }
}
struct setupScannerProcess: UIViewRepresentable {
    func makeUIView(context: UIViewRepresentableContext<setupScannerProcess>) -> BarcodeScanner {
        BarcodeScanner()
    }

    func updateUIView(_ uiView: BarcodeScanner, context: UIViewRepresentableContext<setupScannerProcess>) {
    }

    typealias UIViewType = BarcodeScanner
}
class BarcodeScanner: UIView, AVCaptureMetadataOutputObjectsDelegate, ObservableObject {
    var captureSession: AVCaptureSession?
    var previewLayer: AVCaptureVideoPreviewLayer!

    private let supportedBarcodeTypes: [AVMetadataObject.ObjectType] = [
        .upce, .code39, .code39Mod43, .code93, .code128,
        .ean8, .ean13, .aztec, .pdf417, .itf14,
        .dataMatrix, .interleaved2of5, .qr
    ]
    // Start setup: ask for camera permission, then configure the capture session.
    init() {
        super.init(frame: .zero)

        // Block until the user grants or denies camera access.
        var allowedAccess = false
        let blocker = DispatchGroup()
        blocker.enter()
        AVCaptureDevice.requestAccess(for: .video) { flag in
            allowedAccess = flag
            blocker.leave()
        }
        blocker.wait()

        if !allowedAccess {
            print("Access has been denied to the camera. Please enable Camera Usage in Settings.")
            return
        }

        // Set up the session with the default wide-angle camera.
        let session = AVCaptureSession()
        session.beginConfiguration()
        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .unspecified),
              let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice),
              session.canAddInput(videoDeviceInput) else {
            print("There is no camera detected.")
            return
        }
        session.addInput(videoDeviceInput)
        session.commitConfiguration()
        self.captureSession = session

        // Add the metadata output that delivers decoded barcodes to the delegate.
        let metadataOutput = AVCaptureMetadataOutput()
        if session.canAddOutput(metadataOutput) {
            session.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = supportedBarcodeTypes
        } else {
            print("Metadata was not able to be exported.")
            return
        }

        // Attach the session to the preview layer and start scanning.
        self.videoPreviewLayer.session = self.captureSession
        self.videoPreviewLayer.videoGravity = .resizeAspectFill
        self.captureSession?.startRunning()
    }
    // Back this UIView with an AVCaptureVideoPreviewLayer.
    override class var layerClass: AnyClass {
        AVCaptureVideoPreviewLayer.self
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    var videoPreviewLayer: AVCaptureVideoPreviewLayer {
        return layer as! AVCaptureVideoPreviewLayer
    }
    // Scanning callback: called whenever a supported barcode is recognized.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        self.captureSession?.stopRunning()
        if let metadataObject = metadataObjects.first {
            guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject else { return }
            guard let stringValue = readableObject.stringValue else { return }
            AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
            // This is where the decoded value is stored; it changes, but the TextField never updates.
            scannedLocation = stringValue
            // Start running again after a scan.
            self.captureSession?.startRunning()
        }
    }
}
struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        FirstView()
    }
}
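For completeness, this is roughly how I imagine the decoded value would have to reach SwiftUI if the observable-object idea from the sketch above is the right direction, instead of writing into the global scannedLocation (again only a sketch; ModelBackedScanner and its weak model reference are names I made up, not code from my project):

import AVFoundation
import Combine
import Foundation

// ScannerModel as in the first sketch: a @Published property that views can observe.
final class ScannerModel: ObservableObject {
    @Published var scannedLocation = ""
}

// Hypothetical delegate that forwards the decoded string to the model instead of a global.
final class ModelBackedScanner: NSObject, AVCaptureMetadataOutputObjectsDelegate {
    weak var model: ScannerModel?

    func metadataOutput(_ output: AVCaptureMetadataOutput,
                        didOutput metadataObjects: [AVMetadataObject],
                        from connection: AVCaptureConnection) {
        guard let readable = metadataObjects.first as? AVMetadataMachineReadableCodeObject,
              let stringValue = readable.stringValue else { return }
        // Publish on the main queue so any Text/TextField observing the model refreshes.
        DispatchQueue.main.async {
            self.model?.scannedLocation = stringValue
        }
    }
}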