How to compile Objective-C code that uses AVCapture from the macOS terminal

Date: 2017-10-12 21:32:16

Tags: objective-c macos terminal

I'm a beginner with Objective-C and I need to compile, from the macOS terminal, a program that uses AVCapture to grab images from two webcams. The code is as follows:

#import <AVFoundation/AVFoundation.h>
#import <AppKit/AppKit.h>

@interface Capture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (weak) AVCaptureSession* session;
- (void) captureOutput: (AVCaptureOutput*) output
 didOutputSampleBuffer: (CMSampleBufferRef) buffer
        fromConnection: (AVCaptureConnection*) connection;
//- (void) captureOutput: (AVCaptureOutput*) output
//   didDropSampleBuffer: (CMSampleBufferRef) buffer
//        fromConnection: (AVCaptureConnection*) connection;
@end
@interface Capture ()
{
  CVImageBufferRef head;
  CFRunLoopRef runLoop;
  int count;
  int secret;
}
- (void) save;
@end

@implementation Capture
@synthesize session;

- (instancetype) initWithInteger: (int) s
{
  self = [super init];
  if (self) {
    runLoop = CFRunLoopGetCurrent();
    head = nil;
    count = 0;
    secret = s;
  }
  return self;
}

- (void) dealloc
{
  @synchronized (self) {
    CVBufferRelease(head);
  }
  NSLog(@"capture released");
}

- (void) save
{
  @synchronized (self) {
    CIImage* ciImage =
      [CIImage imageWithCVImageBuffer: head];
    NSBitmapImageRep* bitmapRep =
      [[NSBitmapImageRep alloc] initWithCIImage: ciImage];

    // Pass an empty dictionary for the properties: argument; nil triggers a
    // nullability warning on newer SDKs.
    NSData* jpgData =
      [bitmapRep representationUsingType: NSJPEGFileType properties: @{}];
    NSString* filename = [NSString stringWithFormat: @"result_%d.jpg", secret];
    [jpgData writeToFile: filename atomically: NO];
    //NSData* pngData =
    //  [bitmapRep representationUsingType:NSPNGFileType properties: nil];
    //[pngData writeToFile: @"result.png" atomically: NO];
  }
  NSLog(@"Saved");
}

- (void) captureOutput: (AVCaptureOutput*) output
 didOutputSampleBuffer: (CMSampleBufferRef) buffer
        fromConnection: (AVCaptureConnection*) connection
{
#pragma unused (output)
#pragma unused (connection)
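  // Retain the newest frame and swap it into `head` under the lock; the
  // frame it replaces is released afterwards, outside the critical section.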
  CVImageBufferRef frame = CMSampleBufferGetImageBuffer(buffer);
  CVImageBufferRef prev;
  CVBufferRetain(frame);
  @synchronized (self) {
    prev = head;
    head = frame;
    count++;
    NSLog(@"Captured");
  }
  CVBufferRelease(prev);
  if (count == 5) {
    // after 5 frames have arrived, save the most recent one and stop
    [self save];
    [self.session stopRunning];
    CFRunLoopStop(runLoop);
  }
}
//- (void) captureOutput: (AVCaptureOutput*) output
//   didDropSampleBuffer: (CMSampleBufferRef) buffer
//        fromConnection: (AVCaptureConnection*) connection
//{
//#pragma unused (output)
//#pragma unused (buffer)
//#pragma unused (connection)
//}
@end


int quit(NSError * error)
{
  NSLog(@"[error] %@", [error localizedDescription]);
  return 1;
}

int main()
{
  NSError* error = nil;
  Capture* capture_1 = [[Capture alloc] initWithInteger: 1];
  Capture* capture_2 = [[Capture alloc] initWithInteger: 2];

  //NSArray* devices =
  //  [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
  //AVCaptureDevice* device = [devices objectAtIndex: 0];
  NSArray* devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo];
  for(id obj in devices){
    NSLog(@"[XX device] %@", obj);
  }

  // Note: these unique IDs are machine-specific; they must match the
  // uniqueID values of the cameras attached to your own system.
  AVCaptureDevice* logitech_1 = [AVCaptureDevice deviceWithUniqueID:@"0x14344000046d081b"];
  AVCaptureDevice* logitech_2 = [AVCaptureDevice deviceWithUniqueID:@"0x14342000046d081b"];

  NSLog(@"[check device] %@", logitech_1);
  NSLog(@"[check device] %@", logitech_2);

  /*AVCaptureDevice* device =
    [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];*/
  AVCaptureDevice* device_1 = logitech_1;
  AVCaptureDevice* device_2 = logitech_2;

  //NSLog(@"[device] %@", device);

  AVCaptureDeviceInput* input_1 =
    [AVCaptureDeviceInput deviceInputWithDevice: logitech_1  error: &error];
  NSLog(@"[input] %@", input_1);

  AVCaptureDeviceInput* input_2 =
    [AVCaptureDeviceInput deviceInputWithDevice: logitech_2  error: &error];
  NSLog(@"[input] %@", input_2);

  AVCaptureVideoDataOutput* output_1 =
    [[AVCaptureVideoDataOutput alloc] init];
  [output_1 setSampleBufferDelegate: capture_1 queue: dispatch_get_main_queue()];
  NSLog(@"[output] %@", output_1);
  AVCaptureVideoDataOutput* output_2 =
    [[AVCaptureVideoDataOutput alloc] init];
  [output_2 setSampleBufferDelegate: capture_2 queue: dispatch_get_main_queue()];
  NSLog(@"[output] %@", output_2);

  AVCaptureSession* session_1 = [[AVCaptureSession alloc] init];
  [session_1 addInput: input_1];
  [session_1 addOutput: output_1];

  AVCaptureSession* session_2 = [[AVCaptureSession alloc] init];
  [session_2 addInput: input_2];
  [session_2 addOutput: output_2];

  capture_1.session = session_1;
  capture_2.session = session_2;
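  // The sessions run one after the other: each Capture delegate stops the
  // main run loop after saving its frame, so the next session can start.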
  [session_2 startRunning];

  NSLog(@"Started");
  CFRunLoopRun();

  [session_1 startRunning];
  CFRunLoopRun();

  NSLog(@"Stopped");
  return 0;
}

The command used to compile it:

clang -fobjc-arc -Wall -Wextra -pedantic avcapture.m \
     -framework Cocoa -framework AVFoundation -framework CoreMedia \
     -framework QuartzCore -o avcapture

When I run this command, I get some warnings and the following error:

clang: error: linker command failed with exit code 1 (use -v to see invocation)

Can someone help me resolve this?

1 Answer:

Answer 0 (score: 0)

With the command clang -fobjc-arc -framework AVFoundation -framework CoreMedia -framework CoreImage -framework Cocoa -framework QuartzCore avcapture.m, your file compiles and produces an a.out executable. The key difference from your command is the added -framework CoreImage: your save method creates a CIImage, and on current macOS SDKs that class ships in the CoreImage framework rather than QuartzCore, so the linker was most likely failing on that missing symbol.
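
For reference, a full command that keeps the warning flags and the output name from your question and only adds the missing framework would look like this (a sketch, assuming the source file is saved as avcapture.m):

clang -fobjc-arc -Wall -Wextra -pedantic avcapture.m \
     -framework Cocoa -framework AVFoundation -framework CoreMedia \
     -framework QuartzCore -framework CoreImage -o avcapture

Note also that the device unique IDs hard-coded in your source are machine-specific. A minimal way to print each camera's ID explicitly, using the documented localizedName and uniqueID properties of AVCaptureDevice, would be:

for (AVCaptureDevice* d in [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo]) {
  NSLog(@"%@ -> %@", [d localizedName], [d uniqueID]);
}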