Is a machine learning app compatible with the iPhone 5S?
0 votes / 09 December 2018

import UIKit
import AVKit
import Vision

// Conform to the *video* data output delegate so camera frames reach captureOutput(_:didOutput:from:).
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

@IBOutlet weak var resultLabel: UILabel!

@IBOutlet weak var observeButtonOutlet: UIButton!

var captureSession: AVCaptureSession!
var observing = false


override func viewDidLoad() {
    super.viewDidLoad()
    // Do any additional setup after loading the view, typically from a nib.

    captureSession = AVCaptureSession()
    setUpCapture()
}


// MARK: - IBActions
@IBAction func observeButtonPressed(_ sender: Any) {

    observing = !observing

    if observing {
        observeButtonOutlet.setTitle("Stop", for: .normal)
        startCapturing()
    } else {
        observeButtonOutlet.setTitle("Observe", for: .normal)
        stopCapturing()
    }
}

func startCapturing() {

    captureSession.startRunning()

}

func stopCapturing() {

    captureSession.stopRunning()

}

func setUpCapture() {

    captureSession.sessionPreset = .photo

    // Avoid force-unwrapping: default(for: .video) can be nil (e.g. in the simulator).
    guard let captureDevice = AVCaptureDevice.default(for: .video),
          let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }

    captureSession.addInput(input)

    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)

    view.layer.addSublayer(previewLayer)

    previewLayer.frame = CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.view.frame.height - 70)

    // Use a *video* data output; an AVCaptureAudioDataOutput delivers audio buffers,
    // so CMSampleBufferGetImageBuffer would always return nil and Vision would never run.
    let dataOutput = AVCaptureVideoDataOutput()
    dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))

    captureSession.addOutput(dataOutput)

}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

    //print("Captured frame", Date())

    // Bail out instead of force-unwrapping if the buffer carries no image data.
    guard let cvPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

    // Note: building the model on every frame is expensive; caching it in a property is cheaper.
    guard let model = try? VNCoreMLModel(for: SqueezeNet().model) else { return }

    let request = VNCoreMLRequest(model: model) { (request, error) in

        if let error = error {
            print("error \(error.localizedDescription)")
            return
        }

       // print("request \(request.results)")

        guard let result = request.results as? [VNClassificationObservation] else { return }

        guard let firstObservation = result.first else { return }

        DispatchQueue.main.async {

            let confidence = String(format: "%.2f", firstObservation.confidence * 100)

            self.resultLabel.text = "\(firstObservation.identifier) \(confidence) %"
        }
    }

    try? VNImageRequestHandler(cvPixelBuffer: cvPixelBuffer, options: [:]).perform([request])
}

}

I compiled the code above using the SqueezeNet ML model, but I am not sure why the name of the detected item never appears in the label (screenshot attached).
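One thing worth ruling out first (an assumption on my side, since the post does not show the project's Info.plist): the app must declare NSCameraUsageDescription, and the capture session delivers no frames until the user grants camera access, so the delegate callback, and with it the label update, never happens. A quick check along these lines:

import AVFoundation

// While camera access is denied or not yet granted, the sample-buffer
// delegate never fires, so resultLabel is never updated.
func checkCameraPermission() {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        print("camera access already granted")
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            print(granted ? "camera access granted" : "camera access denied")
        }
    default:
        print("camera access denied or restricted; enable it in Settings")
    }
}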


The problem is that the object captured by the camera is never shown in resultLabel.
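Besides the audio-vs-video output mix-up corrected in the listing above, it also helps to build the VNCoreMLModel once rather than on every frame. Below is a minimal sketch of that pattern; FrameClassifier is a hypothetical helper name, and it assumes the same Xcode-generated SqueezeNet class used in the question:

import Foundation
import Vision

final class FrameClassifier {

    // Build the Vision wrapper once; constructing it per frame is wasteful.
    private let model: VNCoreMLModel? = try? VNCoreMLModel(for: SqueezeNet().model)

    // Classify one camera frame and hand the top label to the caller on the main queue.
    func classify(_ pixelBuffer: CVPixelBuffer, completion: @escaping (String) -> Void) {
        guard let model = model else { return }
        let request = VNCoreMLRequest(model: model) { request, _ in
            guard let top = (request.results as? [VNClassificationObservation])?.first else { return }
            let confidence = String(format: "%.2f", top.confidence * 100)
            DispatchQueue.main.async { completion("\(top.identifier) \(confidence) %") }
        }
        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }
}

With such a helper, captureOutput(_:didOutput:from:) reduces to extracting the pixel buffer and calling classify, assigning the resulting string to resultLabel.text in the completion closure.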

...