AVCaptureSession captureOutput callback drops frames and raises an OutOfBuffers error

Date: 2018-10-21 05:09:39

Tags: ios avfoundation opengl-es-2.0 avcapturesession

I'm trying to take the feed from the back camera via an AVCaptureSession and stream it to a texture mapped onto a quad.

See the complete source here.

No matter which preset I use, the didDropSampleBuffer callback reports 'OutOfBuffers'. I've tried copying the sampleBuffer passed to didOutputSampleBuffer, but perhaps there's a problem with my implementation.
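
For context: CMSampleBufferCreateCopy makes a shallow copy that retains the same backing data buffer, so a copy keeps a buffer out of the capture pool just as a plain retain would, and Apple documents the OutOfBuffers drop reason as typically caused by the client holding onto buffers for too long. A minimal retain/release sketch that holds at most one buffer at a time (functionally equivalent to the commented-out copy attempt in the callback below, not a confirmed fix):

    // Hold at most one sample buffer; keeping several alive starves
    // AVFoundation's fixed-size buffer pool and causes dropped frames.
    if (_sampleBuffer) {
        CFRelease(_sampleBuffer);
        _sampleBuffer = NULL;
    }
    _sampleBuffer = (CMSampleBufferRef)CFRetain(sampleBuffer);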

I've also tried using a serial queue, since I understand the capture session's startRunning is a blocking call and shouldn't be made on the main queue. However, using the main queue is the only way I've been able to see any frames.
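
For reference, the usual pattern keeps startRunning off the main queue regardless of which queue the delegate callbacks are delivered on; a minimal sketch using the _sessionQueue created in the setup below:

    // startRunning blocks until the session starts (or fails),
    // so dispatch it onto the private serial queue.
    dispatch_async(_sessionQueue, ^{
        [self.captureSession startRunning];
    });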

Here is my AV setup:

- (void)setupAV
{
    _sessionQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);

    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, self.context, NULL, &_videoTextureCache);
    if (err) {
        NSLog(@"Couldn't create video cache.");
        return;
    }

    self.captureSession = [[AVCaptureSession alloc] init];
    if (!self.captureSession) {
        return;
    }

    [self.captureSession beginConfiguration];

    self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;

    AVCaptureDevicePosition devicePosition = AVCaptureDevicePositionBack;

    AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:devicePosition];

    for (AVCaptureDevice *device in deviceDiscoverySession.devices) {
        if (device.position == devicePosition) {
            self.captureDevice = device;
            if (self.captureDevice != nil) {
                break;
            }
        }
    }

    NSError *captureDeviceError = nil;
    AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:self.captureDevice error:&captureDeviceError];
    if (captureDeviceError) {
        NSLog(@"Couldn't configure device input.");
        return;
    }

    if (![self.captureSession canAddInput:input]) {
        NSLog(@"Couldn't add video input.");
        [self.captureSession commitConfiguration];
        return;
    }

    [self.captureSession addInput:input];

    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    if (!self.videoOutput) {
        NSLog(@"Error creating video output.");
        [self.captureSession commitConfiguration];
        return;
    }

    self.videoOutput.alwaysDiscardsLateVideoFrames = YES;
    self.videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

    [self.videoOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    if ([self.captureSession canAddOutput:self.videoOutput]) {
        [self.captureSession addOutput:self.videoOutput];
    } else {
        NSLog(@"Couldn't add video output.");
        [self.captureSession commitConfiguration];
        return;
    }

    if (self.captureSession.isRunning) {
        NSLog(@"Session is already running.");
        [self.captureSession commitConfiguration];
        return;
    }

    // NSError *configLockError;
    // int frameRate = 24;
    // [self.captureDevice lockForConfiguration:&configLockError];
    // self.captureDevice.activeVideoMinFrameDuration = CMTimeMake(1, frameRate);
    // self.captureDevice.activeVideoMaxFrameDuration = CMTimeMake(1, frameRate);
    // [self.captureDevice unlockForConfiguration];
    //
    // if (configLockError) {
    //     NSLog(@"Error locking for configuration. %@", configLockError);
    // }

    [self.captureSession commitConfiguration];
}
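
Note that the delegate above is set to deliver on the main queue. If the callbacks were moved to _sessionQueue instead, the EAGLContext would have to be made current on that queue before any GL call in the callback; a hedged sketch of that variant (assuming the context is not simultaneously current on another thread, since an EAGLContext may be current on only one thread at a time):

    [self.videoOutput setSampleBufferDelegate:self queue:_sessionQueue];

    // Then, at the top of captureOutput:didOutputSampleBuffer:fromConnection:,
    // bind the GL context to the callback's thread before touching GL state.
    if ([EAGLContext currentContext] != self.context) {
        [EAGLContext setCurrentContext:self.context];
    }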

Here is my captureOutput callback:

- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    //    if (_sampleBuffer) {
    //        CFRelease(_sampleBuffer);
    //        _sampleBuffer = nil;
    //    }
    //
    //    OSStatus status = CMSampleBufferCreateCopy(kCFAllocatorDefault, sampleBuffer, &_sampleBuffer);
    //    if (noErr != status) {
    //        _sampleBuffer = nil;
    //    }
    //
    //    if (!_sampleBuffer) {
    //        return;
    //    }

    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    if (!_videoTextureCache) {
        NSLog(@"No video texture cache");
        return;
    }

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    _rgbaTexture = nil; // NB: CVOpenGLESTextureRef is a CF type, so under ARC this assignment does not release the previous texture
    // Periodic texture cache flush every frame
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);

    // CVOpenGLESTextureCacheCreateTextureFromImage will create GLES texture
    // optimally from CVImageBufferRef.
    glActiveTexture(GL_TEXTURE0);
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                _videoTextureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                (GLsizei)width,
                                                                (GLsizei)height,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &_rgbaTexture);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    if (err) {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }

    if (_rgbaTexture) {
        glBindTexture(CVOpenGLESTextureGetTarget(_rgbaTexture), CVOpenGLESTextureGetName(_rgbaTexture));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    }
}
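
One likely suspect worth flagging: because CVOpenGLESTextureRef is a Core Foundation type, the _rgbaTexture = nil; line above never releases the previous frame's texture, so the pixel buffer backing it can't return to the capture pool, which matches the OutOfBuffers drop reason. A hedged sketch of the usual per-frame release pattern (not a confirmed fix for this exact code):

    // Release the previous frame's texture *before* flushing the cache;
    // the flush only evicts entries that are no longer retained.
    if (_rgbaTexture) {
        CFRelease(_rgbaTexture);
        _rgbaTexture = NULL;
    }
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);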

For completeness, here are the ivar and property declarations:

@interface AVViewController () <AVCaptureVideoDataOutputSampleBufferDelegate> {

    CVOpenGLESTextureRef _rgbaTexture;
    CVOpenGLESTextureCacheRef _videoTextureCache;
    dispatch_queue_t _sessionQueue;
    GLuint _program;
    GLuint _vertexArray;
    GLuint _vertexBuffer;
    CMSampleBufferRef _sampleBuffer;
}

@property (nonatomic, strong) EAGLContext *context;
@property (nonatomic, strong) AVCaptureSession *captureSession;
@property (nonatomic, strong) AVCaptureDevice *captureDevice;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoOutput;
@property (readwrite) GLint vertexAttrib;
@property (readwrite) GLint textureAttrib;
@property (readwrite) GLint videoFrameUniform;
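
If CF references such as _sampleBuffer or _rgbaTexture are held across callbacks, they also need balancing when the controller is torn down; a minimal sketch, assuming ARC (dealloc still runs under ARC, but CF refs must be released manually):

    - (void)dealloc
    {
        // Balance any Core Foundation refs still held at teardown.
        if (_sampleBuffer) {
            CFRelease(_sampleBuffer);
        }
        if (_rgbaTexture) {
            CFRelease(_rgbaTexture);
        }
        if (_videoTextureCache) {
            CFRelease(_videoTextureCache);
        }
    }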

I've searched around and haven't been able to find a solution. Any help would be greatly appreciated.

0 Answers:

No answers yet.