Я пытаюсь создать приложение с аудио, видео и фото для жестов, используя AVFoundation, используя тот же AVCaptureSession.
Ниже описана процедура настройки камеры и сеанса съемки.
func setUpCaptureSession(){
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setUpDevice(){
let deviceDiscoverSession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
let devices = deviceDiscoverSession.devices
for device in devices{
if device.position == AVCaptureDevice.Position.back{
backCamera = device
}else{
frontCamera = device
}
}
currentCamera = backCamera
}
func setUpInputOutput(){
do{
let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera!)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.isHighResolutionCaptureEnabled = true
self.captureSession.addOutput(photoOutput!)
if #available(iOS 11.0, *) {
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey : AVVideoCodecType.jpeg])], completionHandler: nil)
} else {
// Fallback on earlier versions
photoOutput?.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecJPEG])], completionHandler: nil)
}
}catch{
print(error)
}
}
func setUpPreviewLayer(){
cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewLayer?.frame = self.cameraView.frame
self.cameraView.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession(){
captureSession.startRunning()
//Session starts with camera as default so set it to gesture
let doubleTap = UITapGestureRecognizer(target: self, action: #selector(takePhoto(sender:)))
doubleTap.numberOfTapsRequired = 2
self.view.addGestureRecognizer(doubleTap)
}
Чтобы сделать фотографию с камеры (которая работает, фотография сохраняется в галерее), я сделал следующее:
@objc func cameraAction(sender: UIButton){
if(imageSetAction()){
cameraImage.image = UIImage(named: "photo")
cameraLabel.textColor = ConstantColors.selectedTextColor
currentSelected = sender.tag
let doubleTap = UITapGestureRecognizer(target: self, action: #selector(takePhoto(sender:)))
doubleTap.numberOfTapsRequired = 2
self.view.addGestureRecognizer(doubleTap)
}
}
@objc func takePhoto(sender: Any){
guard let capturePhotoOutput = self.photoOutput else {
return
}
let photoSettings = AVCapturePhotoSettings()
photoSettings.isAutoStillImageStabilizationEnabled = true
photoSettings.isHighResolutionPhotoEnabled = true
photoSettings.flashMode = .auto
capturePhotoOutput.capturePhoto(with: photoSettings, delegate: self)
}
func photoOutput(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
guard error == nil,
let photoSampleBuffer = photoSampleBuffer else {
print("Error capturing photo: \(String(describing: error))")
return
}
guard let imageData =
AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: photoSampleBuffer, previewPhotoSampleBuffer: previewPhotoSampleBuffer) else {
return
}
let capturedImage = UIImage.init(data: imageData , scale: 1.0)
if let image = capturedImage {
//UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil)
let vc = UIStoryboard.init(name: "ImageEdit", bundle: Bundle.main).instantiateViewController(withIdentifier: "imageEdit") as? ImageEdit
vc?.imagetoEdit = image
self.navigationController?.pushViewController(vc!, animated: true)
}
}
, но когда я пытаюсь сделать фотографию из того же сеанса захвата, я получаю сообщение об ошибке «NSInvalidArgumentException», причина: «*** - [AVCaptureMovieFileOutput startRecordingToOutputFileURL: recordingDelegate:] Нет активных / включенных подключений» ». ниже приведен код того, что я пытался сделать.
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
print("completed")
}
@objc func videoAction(sender: UIButton){
if(imageSetAction()){
videoImage.image = UIImage(named: "video")
videoLabel.textColor = ConstantColors.selectedTextColor
currentSelected = sender.tag
captureSession.removeOutput(photoOutput!)
self.movieFileOutput = AVCaptureMovieFileOutput()
self.captureSession.addOutput(movieFileOutput!)
let longPressGesture = UILongPressGestureRecognizer.init(target: self, action: #selector(handleLongPress))
self.view.addGestureRecognizer(longPressGesture);
}
}
@objc func handleLongPress(gestureRecognizer: UILongPressGestureRecognizer) {
if gestureRecognizer.state == UIGestureRecognizerState.began {
debugPrint("long press started")
let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] as URL
let filePath = documentsURL.appendingPathComponent("tempMovie.mp4")
if FileManager.default.fileExists(atPath: filePath.absoluteString) {
do {
try FileManager.default.removeItem(at: filePath)
}
catch {
// exception while deleting old cached file
// ignore error if any
}
}
movieFileOutput?.startRecording(to: filePath, recordingDelegate: self)
}
else if gestureRecognizer.state == UIGestureRecognizerState.ended {
debugPrint("longpress ended")
movieFileOutput?.stopRecording()
}
}
Любая помощь по этому поводу приветствуется.