App is crashing: Must start a session (using -[AVAssetWriter startSessionAtSourceTime:])
0 votes
/ 22 January 2020

I am trying to record video with AVAssetWriter, starting and stopping the writer with the lines below, both to split the recording into one-minute chunks and whenever the app goes into the background:

videoWriter.endSession(atSourceTime: presentationTime); videoWriter.finishWriting(completionHandler: { [weak self] in })

but the app has crashed several times. I added conditions before appending the pixel buffer, and it still crashes.
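
The condition I check before starting the writer session (pulled from the full listing below):

if canWrite(), presentationTime == nil {
    presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    videoWriter.startSession(atSourceTime: presentationTime)
}

The full class: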

import UIKit
import AVFoundation
import Vision
protocol CameraDelegate: AnyObject {
    func didFailInitializing(_ error:String)
    func didFinishRecording(_ outputUrl:URL?, error:Error?)
    func didFindMultipleFaces()
    func didNotFindFace()
    func didFindFace()
    func volumeLevel(_ value:Float)
}
class Camera: UIView, AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    
    
    weak var delegate:CameraDelegate?
    var enableFaceDetection:Bool = false
    let captureSession = AVCaptureSession()
    // VNRequest: either rectangles or landmarks
    private var faceDetectionRequest: VNRequest?
    let movieOutput = AVCaptureMovieFileOutput()
    var videoDataOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
    var audioDataOutput: AVCaptureAudioDataOutput = AVCaptureAudioDataOutput()
    var videoDataOutputQueue = DispatchQueue(label: "VideoDataOutputQueue")
    var requests = [VNRequest]()
    var previewLayer: AVCaptureVideoPreviewLayer!
    private var devicePosition: AVCaptureDevice.Position = .front
    var activeInput: AVCaptureDeviceInput!
    
    var sampleBufferGlobal : CMSampleBuffer?
    //    let writerFileName = "tempVideoAsset.mov"
    var presentationTime : CMTime!
    //    var outputSettings   = [String: Any]()
    var videoWriterInput: AVAssetWriterInput!
    var videoWriterAudioInput: AVAssetWriterInput!
    var videoWriter: AVAssetWriter!
    
    
    var isRecordingStarted = false
    var isRecordingCompleted = false
    
    var isRecording = false
    var isRecordingEnable = false
    var outputURL: URL!
    var recordingCancelled:Bool = false
    var timer:Timer?
    override init(frame: CGRect) {
        super.init(frame: frame)
        let notificationCenter = NotificationCenter.default
//        notificationCenter.addObserver(self, selector: #selector(didEnterBackground), name: UIApplication.didEnterBackgroundNotification, object: nil)
//        notificationCenter.addObserver(self, selector: #selector(didBecomeActive), name: UIApplication.didBecomeActiveNotification, object: nil)
        // preventing memory leaks...
        notificationCenter.addObserver(forName: Notification.Name.AVCaptureSessionWasInterrupted, object: nil, queue: .main, using: {[weak self] _ in
            if let strong = self {
                print("interupted")
                if strong.isRecordingEnable {
                    strong.stopVideoCapturing()
                }
            }
        })
        notificationCenter.addObserver(forName: Notification.Name.AVCaptureSessionInterruptionEnded, object: nil, queue: .main, using: {[weak self] _ in
            guard let strong = self else { return }
            if strong.isRecordingEnable && !strong.isRecordingCompleted {
                print("interupted ended")
//                if !strong.captureSession.isRunning {
//                    strong.setupSession()
//                }
                strong.startVideoCapturing()
            }
        })
    }
    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        
    }
    deinit {
        print("camera deinitialised")
    }
    private var audioInput: AVAssetWriterInput?
    var videoWriterInputPixelBufferAdaptor:AVAssetWriterInputPixelBufferAdaptor?
    //MARK:- Recording Code
    func setupSession () {
        //Set queues
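        // NB: AVFoundation requires a *serial* queue for sample-buffer delegates;
        // this queue is created concurrent, so callbacks may not arrive in order.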
        videoDataOutputQueue = DispatchQueue(label: "myqueue", qos: .utility, attributes: .concurrent, autoreleaseFrequency: DispatchQueue.AutoreleaseFrequency.inherit, target: DispatchQueue.global())
        
        
        //The output file will be encoded at 720x1280 (see the writer settings in setupWriter)
        // print("Established AVCaptureSession")
        captureSession.sessionPreset = AVCaptureSession.Preset.low
        //Set up the front-facing wide-angle camera
        let videoDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: AVCaptureDevice.Position.front)!
        // print("Created AVCaptureDeviceInput: video")
        
        //Set up the microphone
        let audioDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInMicrophone, for: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)!
        //print("Created AVCaptureDeviceInput: audio")//
        
        
        do {
            captureSession.beginConfiguration()
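            // Use the app's shared AVAudioSession and keep the capture session from reconfiguring it.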
            captureSession.automaticallyConfiguresApplicationAudioSession = false
            captureSession.usesApplicationAudioSession = true
            
            
            // Add camera to your session
            let videoInput = try AVCaptureDeviceInput(device: videoDevice)
            if captureSession.canAddInput(videoInput) {
                captureSession.addInput(videoInput)
                //print("Added AVCaptureDeviceInput: video")
            } else {
                //print("Could not add VIDEO!!!")
            }
            
            // Add microphone to your session
            let audioInput = try AVCaptureDeviceInput(device: audioDevice)
            if captureSession.canAddInput(audioInput) {
                captureSession.addInput(audioInput)
                //print("Added AVCaptureDeviceInput: audio")
            } else {
                //print("Could not add MIC!!!")
            }
            
            
            //Define your video output
            videoDataOutput.videoSettings = [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
            ]
            videoDataOutput.alwaysDiscardsLateVideoFrames = true
            if captureSession.canAddOutput(videoDataOutput) {
                videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
                captureSession.addOutput(videoDataOutput)
                print("Added AVCaptureDataOutput: video")
            }
            
            
            //Define your audio output
            if captureSession.canAddOutput(audioDataOutput) {
                audioDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
                captureSession.addOutput(audioDataOutput)
                print("Added AVCaptureDataOutput: audio")
            }
            
            
            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
            previewLayer.frame = self.bounds
            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
            self.layer.addSublayer(previewLayer)
            //cameraView.layer.addSublayer(previewLayer)
            print("Created AVCaptureVideoPreviewLayer")
            
            
            //Start the session running (this does not start recording)
            captureSession.commitConfiguration()
            print("start running session")
            captureSession.startRunning()
            
            
            self.faceDetectionRequest = VNDetectFaceRectanglesRequest(completionHandler: self.handleFaces1) // Default
            self.setupVision()
        }
        catch let error {
            debugPrint(error.localizedDescription)
        }
    }
    func setupWriter() throws {
        //Set up the AVAssetWriter (to write to file)
        do {
            self.outputURL = tempURL()!
            videoWriter = try AVAssetWriter(outputURL: self.outputURL, fileType: AVFileType.mov)
            print("Setup AVAssetWriter")
            
            
            //Video Settings
            let videoSettings: [String : Any] = [
                AVVideoCodecKey  : AVVideoCodecType.h264,
                AVVideoWidthKey  : 720,
                AVVideoHeightKey : 1280,
            ]
            videoWriterInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
            videoWriterInput.expectsMediaDataInRealTime = true
            print("Setup AVAssetWriterInput: Video")
            if videoWriter.canAdd(videoWriterInput) {
                videoWriter.add(videoWriterInput)
                print("Added AVAssetWriterInput: Video")
            } else {
                print("Could not add VideoWriterInput to VideoWriter")
            }
            
            
            // Add the audio input
            
            //Audio Settings
            let audioSettings : [String : Any] = [
                AVFormatIDKey : kAudioFormatMPEG4AAC,
                AVSampleRateKey : 44100,
                AVEncoderBitRateKey : 64000,
                AVNumberOfChannelsKey: 1
            ]
            videoWriterAudioInput = AVAssetWriterInput(mediaType: AVMediaType.audio, outputSettings: audioSettings)
            videoWriterAudioInput.expectsMediaDataInRealTime = true
            print("Setup AVAssetWriterInput: Audio")
            if videoWriter.canAdd(videoWriterAudioInput) {
                videoWriter.add(videoWriterAudioInput)
                print("Added AVAssetWriterInput: Audio")
            } else {
                print("Could not add AudioWriterInput to VideoWriter")
            }
            
            videoWriter.shouldOptimizeForNetworkUse = false
            //PixelWriter
            videoWriterInputPixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput!, sourcePixelBufferAttributes: [
                kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
                kCVPixelBufferWidthKey as String: 1280,
                kCVPixelBufferHeightKey as String: 768,
                kCVPixelBufferOpenGLESCompatibilityKey as String: true,
                ])
            print("Created AVAssetWriterInputPixelBufferAdaptor")
            
        }
        catch let error {
            throw error
        }
    }
    func exifOrientationFromDeviceOrientation() -> UInt32 {
        enum DeviceOrientation: UInt32 {
            case top0ColLeft = 1
            case top0ColRight = 2
            case bottom0ColRight = 3
            case bottom0ColLeft = 4
            case left0ColTop = 5
            case right0ColTop = 6
            case right0ColBottom = 7
            case left0ColBottom = 8
        }
        var exifOrientation: DeviceOrientation

        // devicePosition is always .front here, so every orientation maps to .left0ColBottom.
        switch UIDevice.current.orientation {
        case .portraitUpsideDown, .landscapeLeft, .landscapeRight:
            exifOrientation = .left0ColBottom
        default:
            exifOrientation = .left0ColBottom
        }
        return exifOrientation.rawValue
    }
    fileprivate func canWrite() -> Bool {
        return isRecording && videoWriter != nil && videoWriter.status == .writing
    }
    
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        autoreleasepool {
            guard CMSampleBufferDataIsReady(sampleBuffer) else{
                return
            }
            
            
            // Audio level metering; only audio sample buffers carry an AudioBufferList.
            if output == self.audioDataOutput {
                var buffer: CMBlockBuffer? = nil
                var audioBufferList = AudioBufferList(mNumberBuffers: 1,
                                                      mBuffers: AudioBuffer(mNumberChannels: 1, mDataByteSize: 0, mData: nil))
                CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
                    sampleBuffer,
                    bufferListSizeNeededOut: nil,
                    bufferListOut: &audioBufferList,
                    bufferListSize: MemoryLayout<AudioBufferList>.size,
                    blockBufferAllocator: nil,
                    blockBufferMemoryAllocator: nil,
                    flags: UInt32(kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment),
                    blockBufferOut: &buffer
                )
                let abl = UnsafeMutableAudioBufferListPointer(&audioBufferList)
                // Sum squared 16-bit samples across all buffers, then report a single RMS level.
                var sum: Int64 = 0
                var count: Int = 0
                for buff in abl {
                    let samples = UnsafeMutableBufferPointer<Int16>(start: UnsafeMutablePointer(OpaquePointer(buff.mData)),
                                                                    count: Int(buff.mDataByteSize) / MemoryLayout<Int16>.size)
                    for sample in samples {
                        let s = Int64(sample)
                        sum += s * s
                        count += 1
                    }
                }
                if count > 0 {
                    let rms = sqrt(Float(sum / Int64(count)))
                    DispatchQueue.main.async {
                        self.delegate?.volumeLevel(rms)
                    }
                }
            }
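            // The writer session must start before the first append; appending any buffer
            // earlier raises "Must start a session (using startSessionAtSourceTime:)".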
            if canWrite(), presentationTime == nil {
                presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                videoWriter.startSession(atSourceTime: presentationTime)
            }
            
            if !self.isRecording {
                return
            }
            if let audio = self.videoWriterAudioInput
            {
                if connection.audioChannels.count > 0
                {
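                    // Block this queue until the audio input can accept data again;
                    // the KVO observer installed in startRecording() signals this condition.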
                    readyForMediaCondition?.lock()
                    while !audio.isReadyForMoreMediaData {
                        readyForMediaCondition?.wait()
                    }
                    readyForMediaCondition?.unlock()
                    audio.append(sampleBuffer)
                    readyForMediaObservation?.invalidate()
//                    if audio.isReadyForMoreMediaData, timer?.isValid ?? false
//                    {
//
//                        videoDataOutputQueue.async() {
//                            audio.append(sampleBuffer)
//                        }
//                        return
//                    }
                }
            }
            if connection.isVideoOrientationSupported {
                connection.videoOrientation = .portrait
            } else {
                return
            }
            if (connection.isVideoStabilizationSupported) {
                //connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto
            }
            
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
                let exifOrientation = CGImagePropertyOrientation(rawValue: exifOrientationFromDeviceOrientation()) else { return }
//            let mycapturedimage = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
            
            let sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            self.presentationTime = sessionAtSourceTime
            let pixelBuffer = imageBuffer
            if self.videoWriterInput.isReadyForMoreMediaData, self.timer?.isValid ?? false {
                //This is getting called!!!
                if self.videoWriter.status != .failed {
                    self.videoWriterInputPixelBufferAdaptor?.append(pixelBuffer, withPresentationTime: sessionAtSourceTime)
                }
            }
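            // Crude throttle meant to run Vision roughly once per second
            // (only when the timestamp's decimal part starts with 0).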
            let t = CMTimeGetSeconds(sessionAtSourceTime)
            if self.enableFaceDetection && "\(t)".contains(".0") {
                print(t)
                var requestOptions: [VNImageOption: Any] = [:]
                
                if let cameraIntrinsicData = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_CameraIntrinsicMatrix, attachmentModeOut: nil) {
                    requestOptions = [.cameraIntrinsics: cameraIntrinsicData]
                }
                
                let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: exifOrientation, options: requestOptions)
                
                do {
                    try imageRequestHandler.perform(requests)
                } catch {
                    print(error)
                }
            }
        } // end autoreleasepool
    }
    
    func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage {
        // Get the sample buffer's Core Video image buffer for the media data
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags.readOnly)

        // Get the base address and layout of the pixel buffer
        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        // Create a device-dependent RGB color space
        let colorSpace = CGColorSpaceCreateDeviceRGB()

        // Create a bitmap graphics context with the sample buffer data (BGRA, premultiplied alpha first)
        var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
        bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue
        let context = CGContext(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo)
        // Create a Quartz image from the pixel data in the bitmap graphics context
        let quartzImage = context?.makeImage()
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags.readOnly)

        // Create an image object from the Quartz image
        return UIImage(cgImage: quartzImage!)
    }
    
    func buffer(from image: UIImage) -> CVPixelBuffer? {
        let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
        var pixelBuffer : CVPixelBuffer?
        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
        guard (status == kCVReturnSuccess) else {
            return nil
        }
        
        CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer!)
        
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        
        context?.translateBy(x: 0, y: image.size.height)
        context?.scaleBy(x: 1.0, y: -1.0)
        
        UIGraphicsPushContext(context!)
        image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
        UIGraphicsPopContext()
        CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))
        
        return pixelBuffer
    }
    
    
    
    
    
    func setupCaptureMode(_ mode: Int) {
        // Video Mode
        
    }
    
    func videoQueue() -> DispatchQueue {
        return DispatchQueue.main
    }
    
    fileprivate func setupVision() {
        if let req = faceDetectionRequest {
            self.requests = [req]
        }
    }
    
    func handleFaces1(request: VNRequest, error: Error?) {
        DispatchQueue.main.async {
            
            //perform all the UI updates on the main queue
            guard let results = request.results as? [VNFaceObservation] else { return }
            if results.count > 1 {
                self.delegate?.didFindMultipleFaces()
            } else if results.count == 1  {
                //face detected
                self.delegate?.didFindFace()
            } else {
                self.delegate?.didNotFindFace()
            }
            
        }
    }
    
    func currentVideoOrientation() -> AVCaptureVideoOrientation {
        var orientation: AVCaptureVideoOrientation
        
        switch UIDevice.current.orientation {
        case .portrait:
            orientation = AVCaptureVideoOrientation.portrait
        case .landscapeRight:
            orientation = AVCaptureVideoOrientation.landscapeLeft
        case .portraitUpsideDown:
            orientation = AVCaptureVideoOrientation.portraitUpsideDown
        default:
            orientation = AVCaptureVideoOrientation.portrait
        }
        return orientation
    }
    func startVideoCapturing() {
        if !isRecording {
            self.recordingCancelled = false
            startRecording()
            if !(timer?.isValid ?? false) {
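                // Repeating timer that periodically breaks the recording into chunks.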
                timer = Timer.scheduledTimer(withTimeInterval: 30, repeats: true, block: {[weak self] _ in
                    if self?.presentationTime != nil {
                        self?.breakVideoChunk()
                    }
                })
            }
        }
    }
    func stopVideoCapturing() {
        timer?.invalidate()
        guard isRecording else { return }
        isRecording = false
        if presentationTime != nil && videoWriter.status == .writing {
            videoWriter.endSession(atSourceTime: presentationTime)
            videoWriter.finishWriting(completionHandler: {[weak self] in
                self?.presentationTime = nil
                guard let url = self?.videoWriter.outputURL else {
                    return
                }
                self?.delegate?.didFinishRecording(url, error: nil)
            })
        } else {
            videoWriter.cancelWriting()
        }
    }
    func stopSession() {
        NotificationCenter.default.removeObserver(self)
        if timer?.isValid ?? false {
            timer?.invalidate()
        }
        if let writer = self.videoWriter {
            writer.cancelWriting()
        }
        DispatchQueue.global().async {
            if self.captureSession.isRunning {
                self.captureSession.stopRunning()
            }
        }
    }
    func tempURL() -> URL? {
        let directory = NSTemporaryDirectory() as NSString
        
        if directory != "" {
            let path = directory.appendingPathComponent(NSUUID().uuidString + ".mov")
            return URL(fileURLWithPath: path)
        }
        
        return nil
    }
    var readyForMediaCondition:NSCondition?
    var readyForMediaObservation:NSKeyValueObservation?
    fileprivate func startRecording() {
        guard !isRecording else { return }
        do {
            try setupWriter()
            videoWriter.startWriting()
            readyForMediaCondition = NSCondition()
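            // When the audio input becomes ready for more data, signal the condition
            // that the capture callback may be blocked on.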
            readyForMediaObservation = videoWriterAudioInput.observe(\.isReadyForMoreMediaData, changeHandler: {[weak self] _, change in
                guard let isReady = change.newValue else {
                    return
                }

                if isReady {
                    self?.readyForMediaCondition?.lock()
                    self?.readyForMediaCondition?.signal()
                    self?.readyForMediaCondition?.unlock()
                }
            })
            isRecording = true
        } catch let error {
            print("error::\(error)")
            self.delegate?.didFailInitializing(error.localizedDescription)
        }
    }
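    // Ends the current chunk: finishes the writer session and file, reports the URL
    // to the delegate, then starts a fresh writer unless recording was cancelled.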
    fileprivate func breakVideoChunk() {
        guard isRecording else { return }
        isRecording = false
        videoWriter.endSession(atSourceTime: presentationTime)
        videoWriter.finishWriting(completionHandler: {[weak self] in
            self?.presentationTime = nil
            guard let url = self?.videoWriter.outputURL else {
                return
            }
            self?.delegate?.didFinishRecording(url, error: nil)
            DispatchQueue.main.async {
                if let strong = self {
                    if !strong.recordingCancelled {
                        strong.startRecording()
                    }
                }
            }
        })
    }
    func cancelRecording() {
        timer?.invalidate()
        recordingCancelled = true
        if isRecording {
            print("is still running")
            videoWriter.cancelWriting()
            isRecording = false
        }
    }
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {

    }
    
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        if let error = error {
            print("Error recording movie: \(error.localizedDescription)")
        } else {
            self.delegate?.didFinishRecording(outputFileURL, error: nil)
            print(outputFileURL)
        }
    }
    
    func uploadRecording(fileUrl:URL?,error:Error?){
        self.delegate?.didFinishRecording(fileUrl, error: error)
    }
}
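
For reference, the call order that AVAssetWriter enforces, as I understand it from the exception text; this is a minimal sketch, not code from the class above, and writeOneBuffer is a hypothetical helper:

import AVFoundation

// Appending to an AVAssetWriterInput is only legal between startSession(atSourceTime:)
// and endSession(atSourceTime:); appending earlier raises
// "Must start a session (using startSessionAtSourceTime:)".
func writeOneBuffer(writer: AVAssetWriter, input: AVAssetWriterInput, sampleBuffer: CMSampleBuffer) {
    if writer.status == .unknown {
        writer.startWriting()
        writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    }
    if writer.status == .writing, input.isReadyForMoreMediaData {
        input.append(sampleBuffer)
    }
}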