I'm trying to build a custom Quartz Composer patch that works just like the built-in Video Input patch, but with the capture device selectable from an input port. It's a small patch and it works for me, but when I connect a DV device (a Canopus ADVC-110) and select it, the ColorSpace is (null) and I get an exception. It works fine with the FaceTime HD camera, which is a video media type. I must be missing something, but I just can't see it.
The captureOutput delegate method fires over and over as if new frames are arriving, and the capture appears to start up fine. What am I missing?
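For reference, execute: passes the buffer's own color space straight through to outputImageProviderFromBufferWithPixelFormat: (see the listing below). I assume I could guard against the NULL color space with something along these lines, falling back to the plug-in context's color space (the fallback is my guess, not something I've verified against the DV device), but I'd rather understand why the muxed device delivers frames without one:

CGColorSpaceRef colorSpace = CVImageBufferGetColorSpace(imageBuffer);
if (colorSpace == NULL) {
// hypothetical fallback: muxed (DV) frames seem to arrive with no attached
// color space, so borrow the context's rather than passing NULL through
colorSpace = [context colorSpace];
}
// ...and then pass colorSpace in place of the CVImageBufferGetColorSpace()
// argument in the outputImageProviderFromBufferWithPixelFormat: call
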
#import <OpenGL/CGLMacro.h>
#import "CaptureWithDevice.h"
#define kQCPlugIn_Name @"Capture With Device"
#define kQCPlugIn_Description @"Serves as a replacement for the default Video Input patch, and differs in that it allows the input device to be specified by the user."
@implementation CaptureWithDevice
@dynamic inputDevice, outputImage;
+ (NSDictionary*) attributes
{
return [NSDictionary dictionaryWithObjectsAndKeys:
kQCPlugIn_Name, QCPlugInAttributeNameKey,
kQCPlugIn_Description, QCPlugInAttributeDescriptionKey,
nil];
}
+ (NSDictionary*) attributesForPropertyPortWithKey:(NSString*)key
{
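// expose an index port whose menu lists every video and muxed (e.g. DV) capture device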
if([key isEqualToString:@"inputDevice"]) {
NSArray *videoDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
NSArray *muxedDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];
NSMutableArray *devices = [NSMutableArray array];
[devices addObjectsFromArray:videoDevices];
[devices addObjectsFromArray:muxedDevices];
NSMutableArray *deviceNames= [NSMutableArray array];
for (QTCaptureDevice *device in devices) {
// -description works, though -localizedDisplayName may read better in the menu
[deviceNames addObject:[device description]];
// be sure not to add CT to the list
}
return [NSDictionary dictionaryWithObjectsAndKeys:
@"Device", QCPortAttributeNameKey,
QCPortTypeIndex,QCPortAttributeTypeKey,
[NSNumber numberWithInt:0], QCPortAttributeMinimumValueKey,
deviceNames, QCPortAttributeMenuItemsKey,
[NSNumber numberWithInt:(int)[devices count] - 1], QCPortAttributeMaximumValueKey,
nil];
}
if([key isEqualToString:@"outputImage"])
return [NSDictionary dictionaryWithObjectsAndKeys:
@"Video Image", QCPortAttributeNameKey,
nil];
return nil;
}
+ (QCPlugInExecutionMode) executionMode
{
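// provider mode: the patch supplies data (the captured frame) to the composition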
return kQCPlugInExecutionModeProvider;
}
+ (QCPlugInTimeMode) timeMode
{
return kQCPlugInTimeModeIdle;
}
- (id) init
{
if ((self = [super init])) {
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_devicesDidChange:)
name:QTCaptureDeviceWasConnectedNotification
object:nil];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(_devicesDidChange:)
name:QTCaptureDeviceWasDisconnectedNotification
object:nil];
}
return self;
}
- (void) finalize
{
[super finalize];
}
- (void) dealloc
{
if (mCaptureSession) {
[mCaptureSession stopRunning];
[mCaptureSession release];
[mCaptureDeviceInput release];
[mCaptureDecompressedVideoOutput release];
}
CVBufferRelease(mCurrentImageBuffer); // NULL-safe, unlike CFRelease
[[NSNotificationCenter defaultCenter] removeObserver:self];
[super dealloc];
}
@end
@implementation CaptureWithDevice (Execution)
- (BOOL) startExecution:(id<QCPlugInContext>)context
{
return YES;
}
- (void) enableExecution:(id<QCPlugInContext>)context
{
}
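// QC calls this when it no longer needs the image provider's buffer; it balances
// the lock and retain taken in execute: below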
static void _BufferReleaseCallback(const void* address, void* info)
{
CVPixelBufferUnlockBaseAddress(info, 0);
CVBufferRelease(info);
}
- (BOOL) execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary*)arguments
{
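// (re)create the capture session on first execution, if it stopped, or when the
// user has selected a different device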
if (!mCaptureSession || [mCaptureSession isRunning]==NO || _currentDevice!=self.inputDevice){
NSError *error = nil;
BOOL success;
NSArray *videoDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
NSArray *muxedDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];
NSMutableArray *devices = [NSMutableArray array];
[devices addObjectsFromArray:videoDevices];
[devices addObjectsFromArray:muxedDevices];
NSUInteger d= self.inputDevice;
if (d >= [devices count]) {
d = 0; // stale index (the device list changed); fall back to the first device
}
QTCaptureDevice *device = [devices objectAtIndex:d];
success = [device open:&error];
if (!success) {
NSLog(@"Could not open device %@", device);
self.outputImage = nil;
return YES;
}
NSLog(@"Opened device successfully");
[mCaptureSession stopRunning]; // tear down any session left over from a previous device
[mCaptureSession release];
mCaptureSession = [[QTCaptureSession alloc] init];
[mCaptureDeviceInput release];
mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device];
// if the device is a muxed connection make sure to get the right connection
if ([muxedDevices containsObject:device]) {
NSLog(@"Disabling audio connections");
NSArray *ownedConnections = [mCaptureDeviceInput connections];
for (QTCaptureConnection *connection in ownedConnections) {
NSLog(@"MediaType: %@", [connection mediaType]);
if ( [[connection mediaType] isEqualToString:QTMediaTypeSound]) {
[connection setEnabled:NO];
NSLog(@"disabling audio connection");
}
}
}
success = [mCaptureSession addInput:mCaptureDeviceInput error:&error];
if (!success) {
NSLog(@"Failed to add Input");
self.outputImage = nil;
if (mCaptureSession) {
[mCaptureSession release];
mCaptureSession= nil;
}
if (mCaptureDeviceInput) {
[mCaptureDeviceInput release];
mCaptureDeviceInput= nil;
}
return YES;
}
NSLog(@"Adding output");
[mCaptureDecompressedVideoOutput release];
mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
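// request OpenGL-compatible 32-bit ARGB frames so the buffers match QCPlugInPixelFormatARGB8 below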
[mCaptureDecompressedVideoOutput setPixelBufferAttributes:
[NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
[NSNumber numberWithLong:k32ARGBPixelFormat], kCVPixelBufferPixelFormatTypeKey, nil]];
[mCaptureDecompressedVideoOutput setDelegate:self];
success = [mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error];
if (!success) {
NSLog(@"Failed to add output");
self.outputImage = nil;
if (mCaptureSession) {
[mCaptureSession release];
mCaptureSession= nil;
}
if (mCaptureDeviceInput) {
[mCaptureDeviceInput release];
mCaptureDeviceInput= nil;
}
if (mCaptureDecompressedVideoOutput) {
[mCaptureDecompressedVideoOutput release];
mCaptureDecompressedVideoOutput= nil;
}
return YES;
}
[mCaptureSession startRunning];
_currentDevice= self.inputDevice;
}
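// frames arrive via the delegate, so mCurrentImageBuffer may still be NULL
// right after startRunning; output nil until the first one shows up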
CVImageBufferRef imageBuffer;
@synchronized (self) {
imageBuffer = CVBufferRetain(mCurrentImageBuffer); // take our own reference under the lock
}
if (imageBuffer) {
CVPixelBufferLockBaseAddress(imageBuffer, 0);
NSLog(@"ColorSpace: %@", CVImageBufferGetColorSpace(imageBuffer));
//NSLog(@"ColorSpace: %@ Height: %@ Width: %@", CVImageBufferGetColorSpace(imageBuffer), CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer));
id provider= [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8
pixelsWide:CVPixelBufferGetWidth(imageBuffer)
pixelsHigh:CVPixelBufferGetHeight(imageBuffer)
baseAddress:CVPixelBufferGetBaseAddress(imageBuffer)
bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer)
releaseCallback:_BufferReleaseCallback
releaseContext:imageBuffer
colorSpace:CVImageBufferGetColorSpace(imageBuffer)
shouldColorMatch:YES];
if(provider == nil) {
return NO;
}
self.outputImage = provider;
}
else
self.outputImage = nil;
return YES;
}
- (void) disableExecution:(id<QCPlugInContext>)context
{
}
- (void) stopExecution:(id<QCPlugInContext>)context
{
// stop capturing when the composition stops; execute: rebuilds the session if it restarts
[mCaptureSession stopRunning];
}
- (void)captureOutput:(QTCaptureOutput *)captureOutput
didOutputVideoFrame:(CVImageBufferRef)videoFrame
withSampleBuffer:(QTSampleBuffer *)sampleBuffer
fromConnection:(QTCaptureConnection *)connection
{
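// called on the capture thread for every decompressed frame; just stash the
// newest frame for execute: to pick up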
NSLog(@"connection type: %@", [connection mediaType]);
CVBufferRetain(videoFrame);
CVImageBufferRef imageBufferToRelease;
@synchronized (self) {
// swap frames under the lock so execute: never races against the exchange
imageBufferToRelease = mCurrentImageBuffer;
mCurrentImageBuffer = videoFrame;
}
CVBufferRelease(imageBufferToRelease);
}
- (void)_devicesDidChange:(NSNotification *)aNotification
{
// placeholder: nothing rebuilt yet when devices come and go
}
@end