I'm trying to take the output of an AVCaptureSession from the back camera and render it into a texture mapped onto a quad.
The full source code is available here.
No matter which preset I use, the didDropSampleBuffer callback reports OutOfBuffers. I tried copying the sampleBuffer passed to didOutputSampleBuffer, but perhaps my implementation has a problem.
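From the docs, my understanding is that CMSampleBufferCreateCopy only makes a shallow copy: the copy retains the same underlying CVPixelBuffer, so holding onto copies could still keep buffers out of the capture pool. A hypothetical check, not in my project, to illustrate what I mean:

    CMSampleBufferRef shallowCopy = NULL;
    OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &shallowCopy);
    if (status == noErr) {
        // Both sample buffers report the same CVPixelBuffer pointer, so the
        // copy does not detach the frame from the capture pool.
        NSLog(@"Same pixel buffer? %d",
              CMSampleBufferGetImageBuffer(sampleBuffer) == CMSampleBufferGetImageBuffer(shallowCopy));
        CFRelease(shallowCopy);
    }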
I also tried using a serial queue, since I know the capture session's startRunning is a blocking call and shouldn't happen on the main queue. However, using the main queue is the only way I actually see frames.
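For reference, the serial-queue variant I tried looked roughly like this (a simplified sketch, not my exact code):

    // Deliver frames on the serial queue instead of the main queue...
    [self.videoOutput setSampleBufferDelegate:self queue:_sessionQueue];

    // ...and start the session off the main thread, since -startRunning
    // blocks until the session is actually running.
    dispatch_async(_sessionQueue, ^{
        [self.captureSession startRunning];
    });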
Here is my AV setup:
- (void)setupAV
{
    _sessionQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);

    // Texture cache that bridges CVPixelBuffers into GLES textures.
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, self.context, NULL, &_videoTextureCache);
    if (err) {
        NSLog(@"Couldn't create video cache.");
        return;
    }

    self.captureSession = [[AVCaptureSession alloc] init];
    if (!self.captureSession) {
        return;
    }

    [self.captureSession beginConfiguration];
    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    // Pick the back wide-angle camera.
    AVCaptureDevicePosition devicePosition = AVCaptureDevicePositionBack;
    AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:devicePosition];
    for (AVCaptureDevice *device in deviceDiscoverySession.devices) {
        if (device.position == devicePosition) {
            self.captureDevice = device;
            break;
        }
    }

    NSError *captureDeviceError = nil;
    AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:self.captureDevice error:&captureDeviceError];
    if (!input) {
        NSLog(@"Couldn't configure device input. %@", captureDeviceError);
        [self.captureSession commitConfiguration];
        return;
    }

    if (![self.captureSession canAddInput:input]) {
        NSLog(@"Couldn't add video input.");
        [self.captureSession commitConfiguration];
        return;
    }
    [self.captureSession addInput:input];

    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    if (!self.videoOutput) {
        NSLog(@"Error creating video output.");
        [self.captureSession commitConfiguration];
        return;
    }

    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;

    // Request BGRA frames so they can be uploaded to GL_BGRA textures directly.
    NSDictionary *settings = @{ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    self.videoOutput.videoSettings = settings;

    [self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    if ([self.captureSession canAddOutput:self.videoOutput]) {
        [self.captureSession addOutput:self.videoOutput];
    } else {
        NSLog(@"Couldn't add video output.");
        [self.captureSession commitConfiguration];
        return;
    }

    if (self.captureSession.isRunning) {
        NSLog(@"Session is already running.");
        [self.captureSession commitConfiguration];
        return;
    }

//    NSError *configLockError;
//    int frameRate = 24;
//    [self.captureDevice lockForConfiguration:&configLockError];
//    self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, frameRate);
//    self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, frameRate);
//    [self.captureDevice unlockForConfiguration];
//
//    if (configLockError) {
//        NSLog(@"Error locking for configuration. %@", configLockError);
//    }

    [self.captureSession commitConfiguration];
}
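On the commented-out frame-rate block above: as far as I know, -lockForConfiguration: returns a BOOL, so the safer pattern gates the configuration on the return value instead of inspecting the NSError afterwards. A sketch of what I was going for:

    NSError *configLockError = nil;
    int32_t frameRate = 24;
    if ([self.captureDevice lockForConfiguration:&configLockError]) {
        // Clamp capture to ~24 fps while the lock is held.
        self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, frameRate);
        self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, frameRate);
        [self.captureDevice unlockForConfiguration];
    } else {
        NSLog(@"Error locking for configuration. %@", configLockError);
    }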
And here is my captureOutput callback:
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
//    if (_sampleBuffer) {
//        CFRelease(_sampleBuffer);
//        _sampleBuffer = nil;
//    }
//
//    OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &_sampleBuffer);
//    if (noErr != status) {
//        _sampleBuffer = nil;
//    }
//
//    if (!_sampleBuffer) {
//        return;
//    }

    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    if (!_videoTextureCache) {
        NSLog(@"No video texture cache");
        return;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    _rgbaTexture = nil;

    // Periodic texture cache flush every frame
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);

    // CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture
    // optimally from CVImageBufferRef.
    glActiveTexture(GL_TEXTURE0);
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                _videoTextureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                (GLsizei)width,
                                                                (GLsizei)height,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &_rgbaTexture);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    if (err) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }

    if (_rgbaTexture) {
        glBindTexture(CVOpenGLESTextureGetTarget(_rgbaTexture), CVOpenGLESTextureGetName(_rgbaTexture));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
}
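One thing I'm not sure about is the lifetime of _rgbaTexture: it's a Core Foundation object, so I believe the plain _rgbaTexture = nil; assignment above doesn't actually release it under ARC. Apple's texture-cache samples (e.g. GLCameraRipple) release the previous texture before flushing the cache, roughly like this; could the missing CFRelease be what's exhausting the buffer pool?

    // Release the texture from the previous frame before flushing the cache;
    // otherwise the cache (and the CVPixelBuffer it retains) is never recycled.
    if (_rgbaTexture) {
        CFRelease(_rgbaTexture);
        _rgbaTexture = NULL;
    }
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);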
For completeness, here are the ivar and property declarations:
@interface AVViewController () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    CVOpenGLESTextureRef _rgbaTexture;
    CVOpenGLESTextureCacheRef _videoTextureCache;
    dispatch_queue_t _sessionQueue;
    GLuint _program;
    GLuint _vertexArray;
    GLuint _vertexBuffer;
    CMSampleBufferRef _sampleBuffer;
}

@property (nonatomic, strong) EAGLContext *context;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
@property (readwrite) GLint vertexAttrib;
@property (readwrite) GLint textureAttrib;
@property (readwrite) GLint videoFrameUniform;
I've searched and searched and can't find a solution to this. Any help would be greatly appreciated.