Screen capture including an AVCaptureVideoPreviewLayer with overlay buttons

Date: 2013-11-05 09:38:05

Tags: ios iphone objective-c avcapturesession

I am using a screen recorder to record the screen. It works fine when an ordinary view fills the iPhone screen. But when an AVCaptureVideoPreviewLayer is shown with overlay buttons, the saved screen-capture video shows the overlay buttons without the AVCaptureVideoPreviewLayer content. I used this tutorial to add the overlay. How can I fix this?
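
For context, the overlay setup from such tutorials typically layers a button over the preview layer, roughly like the sketch below. This is a minimal, assumed reconstruction; captureSession, the method name, and the button geometry are illustrative, not from the post:

#import <AVFoundation/AVFoundation.h>

// Minimal sketch of a preview layer with an overlay button (assumed setup).
- (void)setUpPreviewWithOverlay {
    // captureSession is assumed to be an already-configured AVCaptureSession ivar.
    AVCaptureVideoPreviewLayer *previewLayer =
        [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
    previewLayer.frame = self.view.bounds;
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:previewLayer];

    // The overlay button sits on top of the preview layer as a normal subview.
    UIButton *overlayButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    overlayButton.frame = CGRectMake(20.0, 20.0, 100.0, 44.0);
    [overlayButton setTitle:@"Capture" forState:UIControlStateNormal];
    [self.view addSubview:overlayButton];
}

With this layout, a recorder based on renderInContext: captures the ordinary views but not the preview layer, because AVCaptureVideoPreviewLayer is composited in hardware and its content never reaches the CPU-side layer snapshot. That matches the symptom described above.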

1 Answer:

Answer 0: (score: 1)

The code below works around the problem: instead of trying to capture the AVCaptureVideoPreviewLayer itself, it receives each camera frame through an AVCaptureVideoDataOutput delegate callback, converts it to a UIImage, and displays it in a plain UIImageView. A layer snapshot then picks up the image view together with the overlay buttons.

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {

        if ([connection isVideoOrientationSupported])
            [connection setVideoOrientation:[self cameraOrientation]];

        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        /* Lock the image buffer */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        /* Get information about the image */
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        /* Create a CGImageRef from the CVImageBufferRef */
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);

        /* Unlock the buffer and release the context and color space */
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);

        /* image1 is assumed to be a UIImage ivar */
        image1 = [UIImage imageWithCGImage:newImage];

        /* Release the CGImageRef */
        CGImageRelease(newImage);

        dispatch_sync(dispatch_get_main_queue(), ^{
            [self.imageView setImage:image1];
        });
    }
}
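
For captureOutput:didOutputSampleBuffer:fromConnection: to be called at all, an AVCaptureVideoDataOutput has to be attached to the session and configured for BGRA output, matching the kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst bitmap context above. A minimal sketch under that assumption; captureSession and the queue label are illustrative:

// Sketch: attach a BGRA video data output so the delegate above receives frames.
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
videoOutput.alwaysDiscardsLateVideoFrames = YES;

// Deliver frames on a background serial queue (label is illustrative).
dispatch_queue_t captureQueue = dispatch_queue_create("com.example.capture", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:captureQueue];

if ([captureSession canAddOutput:videoOutput]) { // captureSession assumed configured elsewhere
    [captureSession addOutput:videoOutput];
}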

Then run an NSTimer that calls writeSample::

- (void)writeSample:(NSTimer *)_timer {

    if (assetWriterInput.readyForMoreMediaData) {
        @autoreleasepool {
            // Grab a snapshot of the container view (camera image view plus overlay buttons).
            UIGraphicsBeginImageContext(baseViewOne.frame.size);
            [[baseViewOne layer] renderInContext:UIGraphicsGetCurrentContext()];
            screenshota = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();

            CGImageRef image = (CGImageRef)[screenshota CGImage];

            // Wrap the snapshot's bytes in a pixel buffer.
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            CVReturn cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                          baseViewOne.frame.size.width, baseViewOne.frame.size.height,
                                                          kCVPixelFormatType_32BGRA,
                                                          (void *)CFDataGetBytePtr(imageData),
                                                          CGImageGetBytesPerRow(image),
                                                          NULL, NULL, NULL,
                                                          &pixelBuffer);
            if (cvErr != kCVReturnSuccess || pixelBuffer == NULL) {
                CFRelease(imageData);
                return;
            }

            // Derive the presentation time from wall-clock elapsed time.
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            elapsedTime = thisFrameWallClockTime - (firstFrameWallClockTime + pausedFrameTime);
            CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];

            // Release the buffer and its backing data in both branches to avoid leaking.
            CVPixelBufferRelease(pixelBuffer);
            CFRelease(imageData);

            if (!appended) {
                [self stopRecording];
            }
        }
    }
}
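
writeSample: assumes an AVAssetWriter pipeline plus the ivars used above (assetWriter, assetWriterInput, assetWriterPixelBufferAdaptor, firstFrameWallClockTime, pausedFrameTime, elapsedTime, TIME_SCALE). A minimal setup sketch under those assumptions; the helper name, output settings, and 30 fps timer interval are illustrative, not from the original answer:

#define TIME_SCALE 600 // illustrative timescale for the CMTimeMake call above

// Hypothetical helper; assetWriter, assetWriterInput, assetWriterPixelBufferAdaptor,
// and firstFrameWallClockTime are assumed to be ivars, as in the answer.
- (void)startRecordingToURL:(NSURL *)outputURL size:(CGSize)size {
    NSError *error = nil;
    assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&error];

    NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                AVVideoWidthKey  : @(size.width),
                                AVVideoHeightKey : @(size.height) };
    assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:settings];
    assetWriterInput.expectsMediaDataInRealTime = YES;

    assetWriterPixelBufferAdaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:assetWriterInput
                                                                         sourcePixelBufferAttributes:nil];
    [assetWriter addInput:assetWriterInput];

    [assetWriter startWriting];
    [assetWriter startSessionAtSourceTime:kCMTimeZero];
    firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();

    // Drive writeSample: at roughly 30 fps (interval is illustrative).
    [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0
                                     target:self
                                   selector:@selector(writeSample:)
                                   userInfo:nil
                                    repeats:YES];
}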