I am trying to add audio from the device's microphone to a video recording made with the device's camera. The video is filtered with a CIFilter and works as expected. My problem is that the mic audio is not attached to the video when it is saved.
I have tried setting the audio settings manually, like this:
let audioSettings: [String: Any] = [
    AVFormatIDKey: kAudioFormatMPEG4AAC,
    AVNumberOfChannelsKey: 1,
    AVSampleRateKey: 44100,
    AVEncoderBitRateKey: 64000
]
, but using the recommendedAudioSettingsForAssetWriter method seems like the right approach, since the video side already works with recommendedVideoSettingsForAssetWriter.
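i.e. something like this (my assumption being that the container type passed in should match the writer's fileType, which is .mov in my case):
let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mov)
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings as? [String : Any])
audioInput.expectsMediaDataInRealTime = true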
Can anyone tell me how to achieve this, or point me in the right direction?
My code so far:
import UIKit
import AVFoundation

class VideoViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    @IBOutlet weak var imageView: UIImageView!

    lazy var cameraDevice: AVCaptureDevice? = {
        return AVCaptureDevice.default(for: .video)
    }()

    lazy var micDevice: AVCaptureDevice? = {
        return AVCaptureDevice.default(for: .audio)
    }()

    var captureSession = AVCaptureSession()
    var outputURL: URL!
    var orientation: AVCaptureVideoOrientation = .landscapeRight
    var filterObject = FilterObject()
    var assetWriter: AVAssetWriter?
    var assetWriterInput: AVAssetWriterInput?
    var pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor?
    var fileName = ""
    var recordingState = RecordingState.idle
    var time: Double = 0
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    let context = CIContext()
    override func viewDidLoad() {
        super.viewDidLoad()
        setupCameraDevice()
        setupAudioDevice()
        setupInputOutput()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setUpAuthStatus()
    }
    @IBAction func recordPressed(_ sender: UIButton) {
        switch recordingState {
        case .idle:
            recordingState = .start
        case .capturing:
            recordingState = .end
        default:
            break
        }
    }
    func setUpAuthStatus() {
        if AVCaptureDevice.authorizationStatus(for: AVMediaType.video) != .authorized {
            AVCaptureDevice.requestAccess(for: AVMediaType.video, completionHandler: { (authorized) in
                DispatchQueue.main.async {
                    if authorized {
                        self.setupInputOutput()
                    }
                }
            })
        }
        if AVCaptureDevice.authorizationStatus(for: AVMediaType.audio) != .authorized {
            AVCaptureDevice.requestAccess(for: AVMediaType.audio, completionHandler: { (authorized) in
                DispatchQueue.main.async {
                    if authorized {
                        self.setupInputOutput()
                    }
                }
            })
        }
    }
    func setupCameraDevice() {
        let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: .video, position: .unspecified)
        let devices = deviceDiscoverySession.devices
        for device in devices {
            if device.position == .back {
                cameraDevice = device
            }
        }
    }
    func setupAudioDevice() {
        let audioDeviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInMicrophone], mediaType: .audio, position: .unspecified)
        // Take the first microphone, if any; indexing with [0] crashes when none is found
        micDevice = audioDeviceDiscoverySession.devices.first
    }
    func setupInputOutput() {
        do {
            guard let cameraDevice = cameraDevice else { return }
            let captureDeviceInput = try AVCaptureDeviceInput(device: cameraDevice)
            guard let micDevice = micDevice else { return }
            let micDeviceInput = try AVCaptureDeviceInput(device: micDevice)
            captureSession.beginConfiguration() // pairs with commitConfiguration() below
            captureSession.sessionPreset = AVCaptureSession.Preset.hd1920x1080
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }
            if captureSession.canAddInput(micDeviceInput) {
                captureSession.addInput(micDeviceInput)
            }
            let queue = DispatchQueue(label: "com.apple.sample.capturepipeline.video", attributes: [])
            if captureSession.canAddOutput(videoOutput) {
                videoOutput.setSampleBufferDelegate(self, queue: queue)
                captureSession.addOutput(videoOutput)
            }
            if captureSession.canAddOutput(audioOutput) {
                audioOutput.setSampleBufferDelegate(self, queue: queue)
                captureSession.addOutput(audioOutput)
            }
            captureSession.commitConfiguration()
            captureSession.startRunning()
        } catch {
            print(error)
        }
    }
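    // Shared delegate callback: fires for both videoOutput (camera frames)
    // and audioOutput (microphone samples), on the same queue.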
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
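        // CMSampleBufferGetImageBuffer returns nil for audio sample buffers,
        // so every audio buffer exits at the guard below.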
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let cameraImage = CIImage(cvImageBuffer: imageBuffer)
        guard let name = filterObject.name else { return }
        let effect = FilterType.genericFilter(name: name, cameraImage: cameraImage)
        effect.setValue(cameraImage, forKey: kCIInputImageKey)
        TableData.setFilterValues(withFilterName: name, effect: effect, values: [value1, value2])
        guard let outputImage = effect.outputImage else { return }
        context.render(outputImage, to: imageBuffer)
        guard let cgImage = self.context.createCGImage(outputImage, from: cameraImage.extent) else { return }
        DispatchQueue.main.async {
            let filteredImage = UIImage(cgImage: cgImage)
            self.imageView.image = filteredImage
        }
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer).seconds
        switch recordingState {
        case .start:
            fileName = UUID().uuidString
            let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(fileName).mov")
            let writer = try! AVAssetWriter(outputURL: videoPath, fileType: .mov)
            let videoSettings = videoOutput.recommendedVideoSettingsForAssetWriter(writingTo: .mov)
            let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
            videoInput.mediaTimeScale = CMTimeScale(bitPattern: 600)
            videoInput.expectsMediaDataInRealTime = true
            // Container type matches the writer's fileType (.mov, not .m4a)
            let audioSettings = audioOutput.recommendedAudioSettingsForAssetWriter(writingTo: .mov)
            let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings as? [String : Any])
            audioInput.expectsMediaDataInRealTime = true
            //videoInput.transform = CGAffineTransform(rotationAngle: .pi/2)
            let pixelAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: nil)
            if writer.canAdd(videoInput) {
                writer.add(videoInput)
            }
            if writer.canAdd(audioInput) {
                writer.add(audioInput)
            }
            writer.startWriting()
            writer.startSession(atSourceTime: .zero)
            assetWriter = writer
            assetWriterInput = videoInput
            pixelBufferAdaptor = pixelAdapter
            recordingState = .capturing
            time = timestamp
        case .capturing:
            if assetWriterInput?.isReadyForMoreMediaData == true {
                let newTime = CMTime(seconds: timestamp - time, preferredTimescale: CMTimeScale(600))
                pixelBufferAdaptor?.append(imageBuffer, withPresentationTime: newTime)
            }
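            // Only the filtered video frames are ever appended here; nothing
            // appends the microphone's sample buffers to audioInput.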
            break
        case .end:
            guard assetWriterInput?.isReadyForMoreMediaData == true, assetWriter!.status != .failed else { break }
            let url = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(fileName).mov")
            assetWriterInput?.markAsFinished()
            assetWriter?.finishWriting { [weak self] in
                self?.recordingState = .idle
                self?.assetWriter = nil
                self?.assetWriterInput = nil
                DispatchQueue.main.async {
                    let activity = UIActivityViewController(activityItems: [url], applicationActivities: nil)
                    self?.present(activity, animated: true, completion: nil)
                }
            }
        default:
            break
        }
    }
}
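For what it's worth, here is roughly what I suspect is missing: keeping a reference to the audio AVAssetWriterInput (a hypothetical audioWriterInput property that my code above does not have) and appending the audio sample buffers when the callback fires for audioOutput. This is an untested sketch, and I'm not sure how it should interact with the rebased video timestamps:

var audioWriterInput: AVAssetWriterInput? // would be set to audioInput in the .start case

func captureOutput(_ output: AVCaptureOutput,
                   didOutput sampleBuffer: CMSampleBuffer,
                   from connection: AVCaptureConnection) {
    // Audio buffers carry no pixel data, so handle them before the
    // CMSampleBufferGetImageBuffer guard and return early.
    if output == audioOutput {
        if recordingState == .capturing,
           let audioWriterInput = audioWriterInput,
           audioWriterInput.isReadyForMoreMediaData {
            audioWriterInput.append(sampleBuffer)
        }
        return
    }
    // ... existing video filtering and writing code ...
}

Is something along these lines the right direction, or is there a way to have the session attach the mic audio for me?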