Xcode - How to pause video capture when using OpenGL

Asked: 2015-11-06 20:33:37

Tags: ios xcode video opengl-es

I originally wrote some code based on the CapturePause sample (the basic sample code is on GitHub here - https://github.com/cokecoffe/ios-demo/tree/master/capturepause/CapturePause) to pause video capture. Pausing the capture itself obviously isn't the tricky part; the code below checks for the pause flag and then adjusts the timestamps on the buffers to make sure there are no gaps in the recording. That worked very well. This is the method to look at:

- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
BOOL bVideo = YES;
@synchronized(self)
{
    if (!self.isCapturing  || self.isPaused)
    {
        return;
    }
    if (connection != _videoConnection)
    {
        bVideo = NO;
    }
    if ((_encoder == nil) && !bVideo)
    {
        CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
        [self setAudioFormat:fmt];
        NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
        NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];

        // additional quality encoding settings
        if (globalheightvalue != 0)
        {
            _cy = globalheightvalue;
            _cx = globalwidthvalue;
        }


        _encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate];
    }
    if (_discont)
    {
        if (bVideo)
        {
            return;
        }
        _discont = NO;
        // calc adjustment
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime last = bVideo ? _lastVideo : _lastAudio;
        if (last.flags & kCMTimeFlags_Valid)
        {
            if (_timeOffset.flags & kCMTimeFlags_Valid)
            {
                pts = CMTimeSubtract(pts, _timeOffset);
            }
            CMTime offset = CMTimeSubtract(pts, last);
            NSLog(@"Setting offset from %s", bVideo?"video": "audio");
            NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));

            // this stops us having to set a scale for _timeOffset before we see the first video time
            if (_timeOffset.value == 0)
            {
                _timeOffset = offset;
            }
            else
            {
                _timeOffset = CMTimeAdd(_timeOffset, offset);
            }
        }
        _lastVideo.flags = 0;
        _lastAudio.flags = 0;
    }

    // retain so that we can release either this or modified one
    CFRetain(sampleBuffer);

    if (_timeOffset.value > 0)
    {
        CFRelease(sampleBuffer);
        sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
    }

    // record most recent time so we know the length of the pause
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
    if (dur.value > 0)
    {
        pts = CMTimeAdd(pts, dur);
    }
    if (bVideo)
    {
        _lastVideo = pts;
    }
    else
    {
        _lastAudio = pts;
    }
}

// pass frame to encoder
[_encoder encodeFrame:sampleBuffer isVideo:bVideo];
CFRelease(sampleBuffer);
}
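
To make the discontinuity handling above concrete, here is a small worked example of the CMTime arithmetic (the numbers are purely illustrative, not from the sample project); the adjustTime: helper shown next then subtracts the accumulated offset from every buffer written after the pause:

#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>

// Illustrative only: the last video pts seen before pausing was 10.0 s,
// and the first pts after resuming arrives at 14.0 s.
static void DemoPauseOffset(void)
{
    CMTime last   = CMTimeMake(10 * 600, 600);   // 10.0 s at timescale 600
    CMTime first  = CMTimeMake(14 * 600, 600);   // 14.0 s
    CMTime offset = CMTimeSubtract(first, last); // 4.0 s spent paused

    // Every buffer after the resume is shifted back by the accumulated
    // offset, so this frame is written at 10.0 s and the file has no gap.
    CMTime adjusted = CMTimeSubtract(first, offset);
    NSLog(@"adjusted pts = %.1f s", CMTimeGetSeconds(adjusted));
}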


- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset
{
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
    pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
    pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
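
For completeness, the isPaused / _discont state referenced above is toggled outside this callback; in this style of capture code the pause and resume entry points are essentially just flag setters. A minimal sketch, assuming isPaused is writable from inside the capture class (method names are illustrative and may differ from the actual CapturePause sample):

- (void)pauseCapture
{
    @synchronized(self)
    {
        // Stop forwarding buffers; the next buffer after resuming will
        // trigger the discontinuity handling above.
        self.isPaused = YES;
        _discont = YES;
    }
}

- (void)resumeCapture
{
    @synchronized(self)
    {
        self.isPaused = NO;
    }
}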

Now I have switched to the OpenGL-based code for various reasons and I'm trying to get the same functionality (the base code comes from here - https://github.com/BradLarson/GPUImage). I think I need to do something similar here, but I'm not entirely sure - the OpenGL code is rather more complex than I expected! The area of code in question is from the file GPUImageVideoCamera.m:

- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
if (capturePaused)
{
    return;
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
if (colorAttachments != NULL)
{
    if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
    {
        if (isFullYUVRange)
        {
            _preferredConversion = kColorConversion601FullRange;
        }
        else
        {
            _preferredConversion = kColorConversion601;
        }
    }
    else
    {
        _preferredConversion = kColorConversion709;
    }
}
else
{
    if (isFullYUVRange)
    {
        _preferredConversion = kColorConversion601FullRange;
    }
    else
    {
        _preferredConversion = kColorConversion601;
    }
}

CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

[GPUImageContext useImageProcessingContext];

if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
{
    CVOpenGLESTextureRef luminanceTextureRef = NULL;
    CVOpenGLESTextureRef chrominanceTextureRef = NULL;

//        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
    {
        CVPixelBufferLockBaseAddress(cameraFrame, 0);

        if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
        {
            imageBufferWidth = bufferWidth;
            imageBufferHeight = bufferHeight;
        }

        CVReturn err;
        // Y-plane
        glActiveTexture(GL_TEXTURE4);
        if ([GPUImageContext deviceSupportsRedTextures])
        {
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
        }
        else
        {
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
        }
        if (err)
        {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
        glBindTexture(GL_TEXTURE_2D, luminanceTexture);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // UV-plane
        glActiveTexture(GL_TEXTURE5);
        if ([GPUImageContext deviceSupportsRedTextures])
        {
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
        }
        else
        {
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
        }
        if (err)
        {
            NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
        }

        chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
        glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

//            if (!allTargetsWantMonochromeData)
//            {
            [self convertYUVToRGBOutput];
//            }

        int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;

        if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
        {
            rotatedImageBufferWidth = bufferHeight;
            rotatedImageBufferHeight = bufferWidth;
        }

        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];

        CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
        CFRelease(luminanceTextureRef);
        CFRelease(chrominanceTextureRef);
    }
    else
    {
    }


    if (_runBenchmark)
    {
        numberOfFramesCaptured++;
        if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
        {
            CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
            totalFrameTimeDuringCapture += currentFrameTime;
            NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
            NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
        }
    }
}
else
{
    CVPixelBufferLockBaseAddress(cameraFrame, 0);

    int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
    [outputFramebuffer activateFramebuffer];

    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));

    [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];

    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);

    if (_runBenchmark)
    {
        numberOfFramesCaptured++;
        if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
        {
            CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
            totalFrameTimeDuringCapture += currentFrameTime;
        }
    }
}  
}

For reference, the sample project I am working from in the OpenGL framework is SimpleVideoFilter - any help from anyone with experience of this OpenGL code on how to add a proper pause feature would be greatly appreciated - Chaz

1 Answer:

Answer 0 (score: 0)

After much trial and error, an approach emerged that seems very robust; it merges the two code samples above. So, for anyone using Brad Larson's OpenGL framework, add this property to GPUImageVideoCamera.h:

@property(readwrite, nonatomic) BOOL discont;

and replace the captureOutput function in the GPUImageVideoCamera.m file with this code:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if (!self.captureSession.isRunning)
{
    return;
}
if (capturePaused)
{
    return;
}
else if (captureOutput == audioOutput)
{
    [self processAudioSampleBuffer:sampleBuffer];
}
else
{
    if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }

    if (_discont)
    {
        _discont = NO;
        // calc adjustment
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        CMTime last = _lastVideo;
        if (last.flags & kCMTimeFlags_Valid)
        {
            if (_timeOffset.flags & kCMTimeFlags_Valid)
            {
                pts = CMTimeSubtract(pts, _timeOffset);
            }
            CMTime offset = CMTimeSubtract(pts, last);
            NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));

            // this stops us having to set a scale for _timeOffset before we see the first video time
            if (_timeOffset.value == 0)
            {
                _timeOffset = offset;
            }
            else
            {
                _timeOffset = CMTimeAdd(_timeOffset, offset);
            }
        }
        _lastVideo.flags = 0;
        _lastAudio.flags = 0;
    }

    // retain so that we can release either this or modified one
    CFRetain(sampleBuffer);

    if (_timeOffset.value > 0)
    {
        CFRelease(sampleBuffer);
        sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
    }

    // record most recent time so we know the length of the pause
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
    if (dur.value > 0)
    {
        pts = CMTimeAdd(pts, dur);
    }
    _lastVideo = pts;

    runAsynchronouslyOnVideoProcessingQueue(^{
        //Feature Detection Hook.
        if (self.delegate)
        {
            [self.delegate willOutputSampleBuffer:sampleBuffer];
        }

        [self processVideoSampleBuffer:sampleBuffer];

        CFRelease(sampleBuffer);
        dispatch_semaphore_signal(frameRenderingSemaphore);
    });
}
}
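
Note that the replacement method above still relies on the timing state and the adjustTime:by: helper from the question's first snippet; a minimal sketch of the extra declarations that also need to go into GPUImageVideoCamera.m (the names simply mirror the question's code, and the adjustTime:by: method can be copied in unchanged into the same @implementation):

// Timing state used by the replacement captureOutput: above. Shown here as a
// class extension for clarity; the ivars can equally be added alongside the
// existing instance variables in GPUImageVideoCamera.m.
@interface GPUImageVideoCamera ()
{
    CMTime _timeOffset;   // accumulated length of all pauses so far
    CMTime _lastVideo;    // pts (plus duration) of the most recent video buffer
    CMTime _lastAudio;    // kept for symmetry with the question's snippet
}
@end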

From your main view controller the code is then very simple (shown here as button actions, but it can go almost anywhere), and make sure you set _discont = NO at startup to keep things clean:

- (IBAction)PauseButton:(id)sender
{
    [videoCamera pauseCameraCapture];
    videoCamera.discont = YES;
}

- (IBAction)ResumeButton:(id)sender
{
    [videoCamera resumeCameraCapture];
}
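
The startup reset mentioned above can live wherever the camera is created; a minimal sketch, assuming a GPUImageVideoCamera ivar named videoCamera as in the button actions (the rest of the setup is whatever the SimpleVideoFilter example already does):

- (void)viewDidLoad
{
    [super viewDidLoad];

    // ... existing videoCamera / filter setup from SimpleVideoFilter ...

    // Start with a clean discontinuity flag so the first captured frame
    // is not treated as the end of a pause.
    videoCamera.discont = NO;
}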

Hope this helps anyone facing the same challenge.
