I am currently building an iOS app that uses the camera to capture 15 frames per second for 30 seconds (450 frames in total). The problem is that [self.session startRunning] (the last line of the code provided) does not seem to do anything; I say this because I have set up an array called hues that is supposed to receive the average red value of each of the 450 frames that should be captured. However, even after starting and then stopping the capture, the array is still empty. What am I missing?
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
@interface ViewController ()
@property (nonatomic, strong) AVCaptureSession *session;
@property (nonatomic, strong) NSMutableArray *hues;
@end
const int SECONDS = 15;
const int FPS = 30;
@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.hues = [[NSMutableArray alloc] init]; // initiate things (from old code)
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetLow;
    NSInteger numberOfFramesCaptured = self.hues.count;
    // initialize the session with proper settings (from docs)
    NSError *error = nil;
    AVCaptureDevice *captureDevice; // initialize captureDevice and input, and add input (from old code)
    AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if ([self.session canAddInput:videoInput])
    { // fails
        [self.session addInput:videoInput];
    }
    // find the max fps we can get from the given device (from old code)
    AVCaptureDeviceFormat *currentFormat = [captureDevice activeFormat];
    for (AVCaptureDeviceFormat *format in captureDevice.formats) // executes twice
    { // skips all of this
        NSArray *ranges = format.videoSupportedFrameRateRanges;
        AVFrameRateRange *frameRates = ranges[0];
        // find the lowest resolution format at the frame rate we want (from old code)
        if (frameRates.maxFrameRate == FPS && (!currentFormat || (CMVideoFormatDescriptionGetDimensions(format.formatDescription).width < CMVideoFormatDescriptionGetDimensions(currentFormat.formatDescription).width && CMVideoFormatDescriptionGetDimensions(format.formatDescription).height < CMVideoFormatDescriptionGetDimensions(currentFormat.formatDescription).height)))
        {
            currentFormat = format;
        }
    }
    // tell the device to use the max frame rate (from old code)
    [captureDevice lockForConfiguration:nil];
    captureDevice.torchMode = AVCaptureTorchModeOn;
    captureDevice.activeFormat = currentFormat;
    captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, FPS);
    captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, FPS);
    [captureDevice unlockForConfiguration];
    // set the output (from old code)
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // create a queue to run the capture on (from old code)
    dispatch_queue_t queue = dispatch_queue_create("queue", NULL);
    // setup our delegate (from old code)
    [videoOutput setSampleBufferDelegate:self queue:queue];
    // configure the pixel format (from old code)
    videoOutput.videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA], (id)kCVPixelBufferPixelFormatTypeKey, nil];
    videoOutput.alwaysDiscardsLateVideoFrames = NO;
    [self.session addOutput:videoOutput];
    // start the video session
    [self.session startRunning]; // PROBLEM OCCURS HERE
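For reference, the hues array would be filled from the AVCaptureVideoDataOutputSampleBufferDelegate callback, which is not shown above. A minimal sketch of what such a callback could look like for the kCVPixelFormatType_32BGRA format configured here, assuming a simple per-frame average of the red channel (the actual averaging code is not part of the question, and ViewController is assumed to declare conformance to AVCaptureVideoDataOutputSampleBufferDelegate, e.g. in ViewController.h):

// Called once per delivered frame on the serial capture queue.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    // Average the red channel (byte index 2 in a BGRA pixel) over the frame.
    uint64_t redSum = 0;
    for (size_t y = 0; y < height; y++)
    {
        uint8_t *row = base + y * bytesPerRow;
        for (size_t x = 0; x < width; x++)
        {
            redSum += row[x * 4 + 2];
        }
    }
    double averageRed = (double)redSum / (double)(width * height);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    [self.hues addObject:@(averageRed)]; // mutated only on the capture queue
}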
Answer 0 (score: 0):
In your code, AVCaptureDevice *captureDevice; is never initialized. Because it stays nil, the AVCaptureDeviceInput cannot be created properly, the [self.session canAddInput:videoInput] check fails (as your // fails comment notes), and the session starts with no input, so no frames are ever delivered to the delegate and hues stays empty.
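A minimal sketch of one way to obtain a device before creating the input, assuming the default video camera (typically the back camera) is acceptable; authorization checks are omitted:

AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
if (captureDevice == nil)
{
    NSLog(@"No video capture device available");
    return;
}
NSError *error = nil;
AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
if (videoInput && [self.session canAddInput:videoInput])
{
    [self.session addInput:videoInput];
}

With a non-nil device, the rest of your configuration (the format loop, the torch, the frame durations) operates on a real device, and startRunning should begin delivering sample buffers to the delegate.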