I'm trying to create a custom patch for Quartz Composer that will act like the Video Input patch, but with a selectable capture device on an input port. It's a small patch and it looks right to me, but when I attach a DV device (a Canopus ADVC-110) and select it, the ColorSpace is (null) and I get an exception. It works fine with the FaceTime HD camera, which is a video media type. I must be missing something, but I can't see it.
The captureOutput delegate method fires repeatedly as though new frames were arriving, so the capture seems to start fine. What am I missing?
#import <OpenGL/CGLMacro.h>
#import "CaptureWithDevice.h"
#define kQCPlugIn_Name @"Capture With Device"
#define kQCPlugIn_Description @"Serves as a replacement for the default Video Input patch, and differs in that it allows the input device to be specified by the user."
@implementation CaptureWithDevice
@dynamic inputDevice, outputImage;
+ (NSDictionary *)attributes
{
    return [NSDictionary dictionaryWithObjectsAndKeys:
            kQCPlugIn_Name, QCPlugInAttributeNameKey,
            kQCPlugIn_Description, QCPlugInAttributeDescriptionKey,
            nil];
}
+ (NSDictionary *)attributesForPropertyPortWithKey:(NSString *)key
{
    if ([key isEqualToString:@"inputDevice"]) {
        NSArray *videoDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSArray *muxedDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];
        NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init];
        [mutableArrayOfDevice addObjectsFromArray:videoDevices];
        [mutableArrayOfDevice addObjectsFromArray:muxedDevices];
        NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice];
        [mutableArrayOfDevice release];
        NSMutableArray *deviceNames = [NSMutableArray array];
        int i, ic = [devices count];
        for (i = 0; i < ic; i++) {
            [deviceNames addObject:[[devices objectAtIndex:i] description]];
            // be sure not to add CT to the list
        }
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Device", QCPortAttributeNameKey,
                QCPortTypeIndex, QCPortAttributeTypeKey,
                [NSNumber numberWithInt:0], QCPortAttributeMinimumValueKey,
                deviceNames, QCPortAttributeMenuItemsKey,
                [NSNumber numberWithInt:ic - 1], QCPortAttributeMaximumValueKey,
                nil];
    }
    if ([key isEqualToString:@"outputImage"])
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Video Image", QCPortAttributeNameKey,
                nil];
    return nil;
}
+ (QCPlugInExecutionMode)executionMode
{
    return kQCPlugInExecutionModeProvider;
}
+ (QCPlugInTimeMode)timeMode
{
    return kQCPlugInTimeModeIdle;
}
- (id)init
{
    if (self = [super init]) {
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_devicesDidChange:)
                                                     name:QTCaptureDeviceWasConnectedNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_devicesDidChange:)
                                                     name:QTCaptureDeviceWasDisconnectedNotification
                                                   object:nil];
    }
    return self;
}
- (void)finalize
{
    [super finalize];
}
- (void)dealloc
{
    if (mCaptureSession) {
        [mCaptureSession release];
        [mCaptureDeviceInput release];
        [mCaptureDecompressedVideoOutput release];
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [super dealloc];
}
@end
@implementation CaptureWithDevice (Execution)
- (BOOL)startExecution:(id<QCPlugInContext>)context
{
    return YES;
}
- (void)enableExecution:(id<QCPlugInContext>)context
{
}
// Called by Quartz Composer once it no longer needs the frame we handed it:
// unlock the base address locked in -execute: and balance its CVBufferRetain.
static void _BufferReleaseCallback(const void *address, void *info)
{
    CVPixelBufferUnlockBaseAddress(info, 0);
    CVBufferRelease(info);
}
- (BOOL)execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary *)arguments
{
    if (!mCaptureSession || [mCaptureSession isRunning] == NO || _currentDevice != self.inputDevice) {
        NSError *error = nil;
        BOOL success;
        NSArray *videoDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSArray *muxedDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];
        NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init];
        [mutableArrayOfDevice addObjectsFromArray:videoDevices];
        [mutableArrayOfDevice addObjectsFromArray:muxedDevices];
        NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice];
        [mutableArrayOfDevice release];
        NSUInteger d = self.inputDevice;
        if (!(d < [devices count])) {
            d = 0;
        }
        QTCaptureDevice *device = [devices objectAtIndex:d];
        success = [device open:&error];
        if (!success) {
            NSLog(@"Could not open device %@", device);
            self.outputImage = nil;
            return YES;
        }
        NSLog(@"Opened device successfully");
        [mCaptureSession release];
        mCaptureSession = [[QTCaptureSession alloc] init];
        [mCaptureDeviceInput release];
        mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device];
        // if the device is a muxed connection, make sure only the video connection stays enabled
        if ([muxedDevices containsObject:device]) {
            NSLog(@"Disabling audio connections");
            NSArray *ownedConnections = [mCaptureDeviceInput connections];
            for (QTCaptureConnection *connection in ownedConnections) {
                NSLog(@"MediaType: %@", [connection mediaType]);
                if ([[connection mediaType] isEqualToString:QTMediaTypeSound]) {
                    [connection setEnabled:NO];
                    NSLog(@"disabling audio connection");
                }
            }
        }
        success = [mCaptureSession addInput:mCaptureDeviceInput error:&error];
        if (!success) {
            NSLog(@"Failed to add Input");
            self.outputImage = nil;
            if (mCaptureSession) {
                [mCaptureSession release];
                mCaptureSession = nil;
            }
            if (mCaptureDeviceInput) {
                [mCaptureDeviceInput release];
                mCaptureDeviceInput = nil;
            }
            return YES;
        }
        NSLog(@"Adding output");
        [mCaptureDecompressedVideoOutput release];
        mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
        [mCaptureDecompressedVideoOutput setPixelBufferAttributes:
         [NSDictionary dictionaryWithObjectsAndKeys:
          [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
          [NSNumber numberWithLong:k32ARGBPixelFormat], kCVPixelBufferPixelFormatTypeKey, nil]];
        [mCaptureDecompressedVideoOutput setDelegate:self];
        success = [mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error];
        if (!success) {
            NSLog(@"Failed to add output");
            self.outputImage = nil;
            if (mCaptureSession) {
                [mCaptureSession release];
                mCaptureSession = nil;
            }
            if (mCaptureDeviceInput) {
                [mCaptureDeviceInput release];
                mCaptureDeviceInput = nil;
            }
            if (mCaptureDecompressedVideoOutput) {
                [mCaptureDecompressedVideoOutput release];
                mCaptureDecompressedVideoOutput = nil;
            }
            return YES;
        }
        [mCaptureSession startRunning];
        _currentDevice = self.inputDevice;
    }
    // Take our own reference to the latest frame inside the same lock the
    // capture delegate uses, so the swap and the retain can't interleave.
    CVImageBufferRef imageBuffer;
    @synchronized (self) {
        imageBuffer = CVBufferRetain(mCurrentImageBuffer);
    }
    if (imageBuffer) {
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        // With the Muxed (DV) device this logs "ColorSpace: (null)"; handing that
        // NULL color space to the provider below appears to be what raises the exception.
        NSLog(@"ColorSpace: %@", CVImageBufferGetColorSpace(imageBuffer));
        //NSLog(@"ColorSpace: %@ Width: %zu Height: %zu", CVImageBufferGetColorSpace(imageBuffer), CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer));
        id provider = [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8
                                                                 pixelsWide:CVPixelBufferGetWidth(imageBuffer)
                                                                 pixelsHigh:CVPixelBufferGetHeight(imageBuffer)
                                                                baseAddress:CVPixelBufferGetBaseAddress(imageBuffer)
                                                                bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer)
                                                            releaseCallback:_BufferReleaseCallback
                                                             releaseContext:imageBuffer
                                                                 colorSpace:CVImageBufferGetColorSpace(imageBuffer)
                                                           shouldColorMatch:YES];
        if (provider == nil) {
            return NO;
        }
        self.outputImage = provider;
    }
    else
        self.outputImage = nil;
    return YES;
}
- (void)disableExecution:(id<QCPlugInContext>)context
{
}
- (void)stopExecution:(id<QCPlugInContext>)context
{
}
- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    NSLog(@"connection type: %@", [connection mediaType]);
    CVImageBufferRef imageBufferToRelease;
    CVBufferRetain(videoFrame);
    // Read out the old frame and swap in the new one inside the same lock
    // that -execute: uses, so it never sees a half-swapped buffer.
    @synchronized (self) {
        imageBufferToRelease = mCurrentImageBuffer;
        mCurrentImageBuffer = videoFrame;
    }
    CVBufferRelease(imageBufferToRelease);
}
- (void)_devicesDidChange:(NSNotification *)aNotification
{
}
@end
Answer 0 (score: 1)
I managed to get this patch working with both Video and Muxed inputs by removing kCVPixelBufferOpenGLCompatibilityKey from mCaptureDecompressedVideoOutput's pixel buffer attributes. While that lets the patch work perfectly inside Quartz Composer, my intention is to run it in a composition used inside CamTwist, which doesn't appear to need OpenGL support. Yet it now just displays a black screen with Video or Muxed inputs, where it was previously working with Video input. So I'm going to convert my CVImageBufferRef to an OpenGL texture and see if I can use
outputImageProviderFromTextureWithPixelFormat:pixelsWide:pixelsHigh:name:flipped:releaseCallback:releaseContext:colorSpace:shouldColorMatch:
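In case anyone follows the same path, here's a minimal sketch of that texture route. It assumes a CVOpenGLTextureCacheRef ivar (mTextureCache is my own placeholder name) created against the plug-in context's CGL context, with the cache set up in -startExecution: and the per-frame wrapping replacing the buffer-based provider in -execute:; the release callback follows the (CGLContextObj, GLuint, void *) shape of QCPlugInTextureReleaseCallback from QCPlugIn.h. I haven't verified this against the DV device, so treat it as a starting point rather than a drop-in fix:

// Release callback for the texture provider: QC calls this when it is done
// with the texture; dropping our reference lets the cache recycle it.
static void _TextureReleaseCallback(CGLContextObj cgl_ctx, GLuint name, void *info)
{
    CVOpenGLTextureRelease((CVOpenGLTextureRef)info);
}

// In -startExecution: — build the texture cache on the composition's GL context.
CGLContextObj cgl_ctx = [context CGLContextObj];
CVOpenGLTextureCacheCreate(kCFAllocatorDefault, NULL,
                           cgl_ctx, CGLGetPixelFormat(cgl_ctx),
                           NULL, &mTextureCache);

// In -execute:, with imageBuffer retained as before (no base-address lock needed here):
CVOpenGLTextureRef texture = NULL;
if (CVOpenGLTextureCacheCreateTextureFromImage(kCFAllocatorDefault, mTextureCache,
                                               imageBuffer, NULL, &texture) == kCVReturnSuccess) {
    self.outputImage = [context outputImageProviderFromTextureWithPixelFormat:QCPlugInPixelFormatARGB8
                                                                   pixelsWide:CVPixelBufferGetWidth(imageBuffer)
                                                                   pixelsHigh:CVPixelBufferGetHeight(imageBuffer)
                                                                         name:CVOpenGLTextureGetName(texture)
                                                                      flipped:CVOpenGLTextureIsFlipped(texture)
                                                              releaseCallback:_TextureReleaseCallback
                                                               releaseContext:texture
                                                                   colorSpace:[context colorSpace]
                                                             shouldColorMatch:YES];
    CVBufferRelease(imageBuffer); // the texture now holds what it needs
    CVOpenGLTextureCacheFlush(mTextureCache, 0); // recycle textures from earlier frames
} else {
    CVBufferRelease(imageBuffer);
    self.outputImage = nil;
}

Note that I'm passing [context colorSpace] instead of CVImageBufferGetColorSpace(imageBuffer); that would also sidestep the (null) color space the DV device was producing in the buffer-based path.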