EDIT: Added a working Swift sample to match your language requirement:
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    @IBOutlet weak var cameraView: UIView!

    var session: AVCaptureSession!
    var device: AVCaptureDevice?
    var input: AVCaptureDeviceInput?
    var videoOutput: AVCaptureVideoDataOutput!
    var output: AVCaptureMetadataOutput?
    var prevLayer: AVCaptureVideoPreviewLayer!

    override func viewDidLoad() {
        super.viewDidLoad()

        session = AVCaptureSession()
        device = AVCaptureDevice.default(for: AVMediaType.video)

        do {
            input = try AVCaptureDeviceInput(device: device!)
        } catch {
            print(error)
            return
        }

        if let input = input {
            if session.canAddInput(input) {
                session.addInput(input)
            }
        }

        videoOutput = AVCaptureVideoDataOutput()
        videoOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)
        ]
        videoOutput.alwaysDiscardsLateVideoFrames = true

        let queue = DispatchQueue(label: "video-frame-sampler")
        videoOutput.setSampleBufferDelegate(self, queue: queue)

        if session.canAddOutput(videoOutput) {
            session.addOutput(videoOutput)
            if let connection = videoOutput.connection(with: .video) {
                connection.videoOrientation = videoOrientationFromInterfaceOrientation()
                if connection.isVideoStabilizationSupported {
                    connection.preferredVideoStabilizationMode = .auto
                }
            }
        }

        prevLayer = AVCaptureVideoPreviewLayer(session: session)
        prevLayer.frame.size = cameraView.frame.size
        prevLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        cameraView.layer.addSublayer(prevLayer)

        session.startRunning()
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Pass your sampleBuffer to the Vision API here (see the sketch below this class).
        // I recommend not passing every frame; skip some frames until the camera is steady and focused.
        print("frame received")
    }

    func videoOrientationFromInterfaceOrientation() -> AVCaptureVideoOrientation {
        // Fall back to portrait if the status bar orientation cannot be mapped (e.g. .unknown).
        return AVCaptureVideoOrientation(rawValue: UIApplication.shared.statusBarOrientation.rawValue) ?? .portrait
    }
}
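Since the delegate callback only says "pass your sampleBuffer to vision API", here is a minimal sketch of what that hand-off could look like with the Vision framework. The helper name runVision(on:) and the VNRecognizeTextRequest (iOS 13+) are assumptions for illustration; substitute whatever Vision request your use case actually needs and call the helper from captureOutput(_:didOutput:from:), ideally only for every Nth frame:

import Vision

// Sketch only: a helper you could call from captureOutput(_:didOutput:from:).
// VNRecognizeTextRequest is an assumed example request; swap in the request you need.
func runVision(on sampleBuffer: CMSampleBuffer) {
    // Extract the pixel buffer backing the sample buffer.
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

    let request = VNRecognizeTextRequest { request, _ in
        guard let observations = request.results as? [VNRecognizedTextObservation] else { return }
        for observation in observations {
            print(observation.topCandidates(1).first?.string ?? "")
        }
    }

    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    do {
        try handler.perform([request])
    } catch {
        print("Vision request failed: \(error)")
    }
}

Because the delegate already runs on the "video-frame-sampler" queue, performing the request synchronously there is fine; just dispatch any UI updates from the results back to the main queue.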
I see that you have already set up your input and preview layer, but you also need to configure a video capture output to grab your CMSampleBufferRef frames.
To do that, set up an object of type AVCaptureVideoDataOutput with the following steps:
Create an AVCaptureVideoDataOutput instance and configure it:
AVCaptureVideoDataOutput* videoOutput = [AVCaptureVideoDataOutput new];
videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
videoOutput.alwaysDiscardsLateVideoFrames = YES;
Set the sample-buffer (frame capture) delegate of the configured output and add the output to the session:
dispatch_queue_t queue = dispatch_queue_create("video-frame-sampler", 0);
[videoOutput setSampleBufferDelegate:self queue:queue];
if ([self.session canAddOutput:videoOutput]) {
    [self.session addOutput:videoOutput];

    AVCaptureConnection* connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
    connection.videoOrientation = [self videoOrientationFromDeviceOrientation];
    if (connection.supportsVideoStabilization) {
        connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
}
Implement the captureOutput:didOutputSampleBuffer:fromConnection: method, where you will receive the required CMSampleBufferRef:
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // Pass your sampleBuffer to the Vision API here.
    // I recommend not passing every frame; skip some frames until the camera is steady and focused.
}
Sorry, I'm a plain old Objective-C guy, but I hope you can easily convert the code to Swift to fit your needs.
Additionally, here is the code for the videoOrientationFromDeviceOrientation method:
-(AVCaptureVideoOrientation)videoOrientationFromDeviceOrientation {
    UIDeviceOrientation orientation = [UIDevice currentDevice].orientation;
    AVCaptureVideoOrientation result = (AVCaptureVideoOrientation)orientation;
    if ( orientation == UIDeviceOrientationLandscapeLeft )
        result = AVCaptureVideoOrientationLandscapeRight;
    else if ( orientation == UIDeviceOrientationLandscapeRight )
        result = AVCaptureVideoOrientationLandscapeLeft;
    return result;
}
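If you want the Swift sample to use this device-orientation approach instead of the status-bar one, a direct Swift port of the helper above could look like the sketch below; the portrait fallback for faceUp/faceDown/unknown is my assumption, since the cast-based Objective-C version leaves those cases undefined.

// Direct Swift port of the Objective-C helper above (goes inside the view controller).
func videoOrientationFromDeviceOrientation() -> AVCaptureVideoOrientation {
    // Landscape device and video orientations are mirrored, hence the swap.
    switch UIDevice.current.orientation {
    case .portrait:           return .portrait
    case .portraitUpsideDown: return .portraitUpsideDown
    case .landscapeLeft:      return .landscapeRight
    case .landscapeRight:     return .landscapeLeft
    default:                  return .portrait // faceUp/faceDown/unknown: assumed fallback
    }
}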