For some strange reason, AVCaptureVideoDataOutputSampleBufferDelegate is not firing. I've added the delegate and everything, and I'm not sure why it never runs in my code. Can anyone help me figure out why?
The delegates in my .h:
@class AVPlayer;
@class AVPlayerClass;
@interface Camera : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate> {
.m code (initializeCamera is called in viewDidLoad):
-(void)initializeCamera {
    Session = [[AVCaptureSession alloc] init];
    [Session setSessionPreset:AVCaptureSessionPresetPhoto];

    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    [Session addInput:audioInput];

    // Preview layer
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:Session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    CALayer *rootLayer = [[self view] layer];
    [rootLayer setMasksToBounds:YES];
    CGRect frame = self.CameraView.frame;
    [previewLayer setFrame:frame];
    [rootLayer insertSublayer:previewLayer atIndex:0];

    [Session beginConfiguration];

    // Remove existing input
    [Session removeInput:newVideoInput];

    newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
    // FrontCamera = NO;

    [Session setSessionPreset:AVCaptureSessionPresetHigh];
    // Check size-based configs are supported before setting them
    if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
        [Session setSessionPreset:AVCaptureSessionPreset1920x1080];

    // Add input to session
    NSError *err = nil;
    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
    if (!newVideoInput || err) {
        NSLog(@"Error creating capture device input: %@", err.localizedDescription);
    }
    else if ([Session canAddInput:newVideoInput]) {
        [Session addInput:newVideoInput];
    }

    [Session commitConfiguration];

    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [Session addOutput:stillImageOutput];

    MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    Float64 TotalSeconds = 10;
    int32_t preferredTimeScale = 60;
    CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);
    MovieFileOutput.maxRecordedDuration = maxDuration;
    MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;
    if ([Session canAddOutput:MovieFileOutput])
        [Session addOutput:MovieFileOutput];

    // Create a VideoDataOutput and add it to the session
    // AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // [Session addOutput:output];
    //
    // // Configure your output.
    // dispatch_queue_t queue = dispatch_get_main_queue();
    // [output setSampleBufferDelegate:self queue:queue];
    // // dispatch_release(queue);
    //
    // // Specify the pixel format
    // output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
    //                                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    //
    // AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    // [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
    //                                                          forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    // [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    //
    // if ([Session canAddOutput:dataOutput])
    //     [Session addOutput:dataOutput];
    //
    // Add to the session
    // [self setupVideoOutput];
    [Session setSessionPreset:AVCaptureSessionPresetHigh];
    // Check size-based configs are supported before setting them
    if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
        [Session setSessionPreset:AVCaptureSessionPreset1920x1080];

    [Session startRunning];
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"Buff");
    pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    VideoBuffer = pixelBuffer;
}
-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"The drop");
}
Answer 0 (score: 0)
AVCaptureVideoDataOutputSampleBufferDelegate was not firing in my code because I was using AVCaptureMovieFileOutput instead of AVCaptureVideoDataOutput. AVCaptureMovieFileOutput apparently does not use sample buffers. Now that I know how to set up AVCaptureVideoDataOutput correctly to receive sample buffers, I'm posting my code below. Hopefully this helps someone.
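A minimal sketch of that kind of setup, assuming the Session ivar and the delegate methods shown in the question; the queue label "VideoDataQueue" is an arbitrary name chosen here, not something from the original project:

AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];

// Drop frames that arrive while the delegate is still busy instead of queueing them up.
[dataOutput setAlwaysDiscardsLateVideoFrames:YES];

// Ask for BGRA pixel buffers, which are convenient for Core Graphics / OpenGL processing.
[dataOutput setVideoSettings:@{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }];

// Deliver sample buffers on a dedicated serial queue so the main thread stays free.
// ("VideoDataQueue" is an arbitrary label, not from the original code.)
dispatch_queue_t queue = dispatch_queue_create("VideoDataQueue", DISPATCH_QUEUE_SERIAL);
[dataOutput setSampleBufferDelegate:self queue:queue];

// The delegate only fires if the session actually accepts the output.
if ([Session canAddOutput:dataOutput]) {
    [Session addOutput:dataOutput];
}

Note that on iOS a capture session will not deliver frames to an AVCaptureVideoDataOutput while an AVCaptureMovieFileOutput is attached to the same session, so the movie file output from the question has to be removed; recording can instead be done by writing the delivered sample buffers with an AVAssetWriter.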