AVFoundation示例方法实现?

时间:2015-04-18 17:14:21

标签: ios camera avfoundation

我对 iOS 编程还比较陌生(虽然并非编程新手),正在尝试理解 Jonathan Wight 几年前写的这个有趣的 AVFoundation 示例该如何使用。

所以我把它复制粘贴到自己的 NSObject 子类 CCamera.h 和 CCamera.m 文件中,并将其实现为 sharedInstance 单例。我可以调用 startRunning 方法……但接下来该怎么做就有点不知所措了……

ccamera *newcamera = [ccamera sharedInstance];
[newcamera startRunning];

任何人都可以帮助/了解我接下来要做的事情,请使用此代码拍摄静止帧吗?

谢谢

//
//  CCamera.h
//  CCamera
//
//  Created by Jonathan Wight on 7/12/12.
//  Copyright (c) 2012 Jonathan Wight. All rights reserved.
//

#import <Foundation/Foundation.h>

#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>

/// Singleton camera controller. Wraps an AVCaptureSession and offers still-frame
/// capture in several representations (CMSampleBuffer, CVImageBuffer, CIImage,
/// CGImage, UIImage). Call -startRunning before capturing.
@interface CCamera : NSObject

/// Which physical camera to use. Defaults to AVCaptureDevicePositionUnspecified
/// (the system default video device). Set this before -startRunning.
@property (readwrite, nonatomic, assign) AVCaptureDevicePosition captureDevicePosition;

/// Session preset. Defaults to AVCaptureSessionPresetPhoto.
/// Declared `copy` (was `strong`): NSString properties should copy so a caller's
/// NSMutableString cannot mutate behind the receiver's back.
@property (readwrite, nonatomic, copy) NSString *preset;

/// The resolved capture device (lazily chosen from captureDevicePosition).
@property (readonly, nonatomic, strong) AVCaptureDevice *captureDevice;

/// A preview layer bound to the capture session, for on-screen display.
@property (readonly, nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;

/// The process-wide shared instance.
+ (CCamera *)sharedInstance;

/// Builds the session/input/output graph and starts capturing.
- (void)startRunning;

/// Stops the session and releases the session objects.
- (void)stopRunning;

/// Encoded size of a captured frame. Blocks (spins the main run loop) until one
/// frame has been captured; only call after -startRunning.
- (CGSize)size;

// Asynchronous still capture. Each completion block receives either a captured
// image (in the named representation) or an NSError.
- (void)captureStillCMSampleBuffer:(void (^)(CMSampleBufferRef sampleBuffer, NSError *error))inCompletionBlock;
- (void)captureStillCVImageBuffer:(void (^)(CVImageBufferRef imageBuffer, NSError *error))inCompletionBlock;
- (void)captureStillCIImage:(void (^)(CIImage *image, NSError *error))inCompletionBlock;
- (void)captureStillCGImage:(void (^)(CGImageRef image, NSError *error))inCompletionBlock;
- (void)captureStillUIImage:(void (^)(UIImage *image, NSError *error))inCompletionBlock;

@end



//
//  CCamera.m
//  Camera
//
//  Created by Jonathan Wight on 7/12/12.
//  Copyright (c) 2012 Jonathan Wight. All rights reserved.
//

#import "CCamera.h"

#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

// Class extension: private readwrite storage for the session graph.
// captureDevice and previewLayer are redeclared readwrite here so the
// implementation (-stopRunning) can reset them, while the public header
// keeps them readonly.
@interface CCamera ()
@property (readwrite, nonatomic, strong) AVCaptureSession *captureSession;
@property (readwrite, nonatomic, strong) AVCaptureDevice *captureDevice;
@property (readwrite, nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
@property (readwrite, nonatomic, strong) AVCaptureStillImageOutput *imageOutput;
@end

#pragma mark -

@implementation CCamera

// Backing storage for the shared instance; created exactly once below.
static CCamera *gSharedInstance = nil;

/// Returns the process-wide shared camera, creating it on first use.
/// Thread-safe via dispatch_once.
+ (CCamera *)sharedInstance
{
    static dispatch_once_t sOnceToken;
    dispatch_once(&sOnceToken, ^{
        gSharedInstance = [[CCamera alloc] init];
    });
    return gSharedInstance;
}

/// Designated initializer. Defaults to the system-chosen camera position and
/// the photo-quality session preset.
/// Returns `instancetype` (was `id`) per modern Cocoa convention — callers and
/// subclasses get a correctly typed result; fully source-compatible.
- (instancetype)init
{
    self = [super init];
    if (self != nil)
        {
        _captureDevicePosition = AVCaptureDevicePositionUnspecified;
        _preset = AVCaptureSessionPresetPhoto;
        }
    return self;
}

// Ensure the session stops capturing before the instance goes away.
// Direct ivar access (not self.captureSession) is deliberate: accessors must
// not be messaged from dealloc. Messaging nil is a safe no-op if the session
// was never created.
- (void)dealloc
{
[_captureSession stopRunning];
}

/// Lazily resolves the capture device. With an unspecified position the system
/// default video device is used; otherwise the first video device matching the
/// requested position wins. May return nil if no matching device exists.
- (AVCaptureDevice *)captureDevice
{
    if (_captureDevice != nil)
        {
        return _captureDevice;
        }

    if (self.captureDevicePosition == AVCaptureDevicePositionUnspecified)
        {
        _captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        return _captureDevice;
        }

    NSArray *theVideoDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *theCandidate in theVideoDevices)
        {
        if (theCandidate.position == self.captureDevicePosition)
            {
            _captureDevice = theCandidate;
            break;
            }
        }
    return _captureDevice;
}

/// Lazily creates a preview layer bound to the current capture session.
/// NOTE(review): if this is read before -startRunning, captureSession is still
/// nil and the layer is created with no session — confirm callers start first.
- (AVCaptureVideoPreviewLayer *)previewLayer
{
    if (_previewLayer == nil)
        {
        AVCaptureVideoPreviewLayer *theLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        _previewLayer = theLayer;
        }
    return _previewLayer;
}

/// Builds the capture graph (session → device input → still-image output) and
/// starts it. Safe to call even when no camera is available: failures are
/// logged and the session is torn down instead of raising.
- (void)startRunning
{
    NSError *theError = nil;

    self.captureSession = [[AVCaptureSession alloc] init];
    self.captureSession.sessionPreset = self.preset;

    AVCaptureDeviceInput *theCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&theError];
    // deviceInputWithDevice:error: returns nil on failure (no camera, no
    // permission). The original passed that nil straight to -addInput:, which
    // raises NSInvalidArgumentException — bail out instead.
    if (theCaptureDeviceInput == nil)
        {
        NSLog(@"CCamera: could not create capture device input: %@", theError);
        self.captureSession = nil;
        return;
        }
    // Guard with canAddInput:/canAddOutput: as AVCaptureSession documents;
    // adding an incompatible input/output would also raise.
    if ([self.captureSession canAddInput:theCaptureDeviceInput])
        {
        [self.captureSession addInput:theCaptureDeviceInput];
        }

    self.imageOutput = [[AVCaptureStillImageOutput alloc] init];
    self.imageOutput.outputSettings = @{
        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
        };
    if ([self.captureSession canAddOutput:self.imageOutput])
        {
        [self.captureSession addOutput:self.imageOutput];
        }

    [self.captureSession startRunning];
}

/// Stops the session and drops every lazily-created session object so the next
/// -startRunning rebuilds the graph from scratch.
- (void)stopRunning
{
    [self.captureSession stopRunning];

    // Reset in dependency order: device, session, output, preview layer.
    self.captureDevice = nil;
    self.captureSession = nil;
    self.imageOutput = nil;
    self.previewLayer = nil;
}

/// Synchronously captures one frame and returns its encoded pixel size.
/// Spins the main run loop until the async capture finishes, so this must be
/// called on the main thread, after -startRunning. Returns CGSizeZero when no
/// connection exists or the capture fails.
- (CGSize)size
{
    // firstObject is nil-safe; the original objectAtIndex:0 threw an
    // NSRangeException whenever the session was not running.
    AVCaptureConnection *theConnection = [self.imageOutput.connections firstObject];
    if (theConnection == nil)
        {
        return CGSizeZero;
        }

    __block BOOL theFinishedFlag = NO;
    // Was uninitialized: a failed capture previously returned stack garbage.
    __block CGSize theSize = CGSizeZero;

    [self.imageOutput captureStillImageAsynchronouslyFromConnection:theConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        // The buffer is NULL on error; guard before dereferencing.
        if (imageDataSampleBuffer != NULL)
            {
            CVImageBufferRef theImageBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer);
            if (theImageBuffer != NULL)
                {
                theSize = CVImageBufferGetEncodedSize(theImageBuffer);
                }
            }
        theFinishedFlag = YES;
        }];

    while (theFinishedFlag == NO)
        {
        [[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
        }

    return theSize;
}

#pragma mark -

/// Captures one still frame and delivers the raw CMSampleBuffer (or NULL plus
/// an NSError) to the completion block. All other captureStill… methods funnel
/// through here. Requires -startRunning to have been called.
- (void)captureStillCMSampleBuffer:(void (^)(CMSampleBufferRef sampleBuffer, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    // firstObject is nil-safe. The original objectAtIndex:0 threw an
    // NSRangeException when the session wasn't running (the usual caller
    // mistake); surface a proper NSError through the block instead.
    AVCaptureConnection *theConnection = [self.imageOutput.connections firstObject];
    if (theConnection == nil)
        {
        NSError *theError = [NSError errorWithDomain:AVFoundationErrorDomain
                                                code:AVErrorSessionNotRunning
                                            userInfo:@{NSLocalizedDescriptionKey: @"No still-image connection; call -startRunning first."}];
        inCompletionBlock(NULL, theError);
        return;
        }

    [self.imageOutput captureStillImageAsynchronouslyFromConnection:theConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        inCompletionBlock(imageDataSampleBuffer, error);
        }];
}

/// Captures one still frame and delivers its CVImageBuffer (pixel buffer).
/// On failure the block receives NULL and the underlying error.
- (void)captureStillCVImageBuffer:(void (^)(CVImageBufferRef imageBuffer, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCMSampleBuffer:^(CMSampleBufferRef sampleBuffer, NSError *error) {
        // A NULL sample buffer (error case) maps to a NULL image buffer.
        CVImageBufferRef theImageBuffer = (sampleBuffer != NULL) ? CMSampleBufferGetImageBuffer(sampleBuffer) : NULL;
        inCompletionBlock(theImageBuffer, error);
        }];
}

/// Captures one still frame wrapped as a CIImage. On failure the block
/// receives nil and the underlying error.
- (void)captureStillCIImage:(void (^)(CIImage *image, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCVImageBuffer:^(CVImageBufferRef imageBuffer, NSError *error) {
        CIImage *theCIImage = nil;
        if (imageBuffer != NULL)
            {
            theCIImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
            }
        inCompletionBlock(theCIImage, error);
        }];
}

/// Captures one still frame rendered to a CGImage.
/// Ownership: the CGImage is released right after the completion block
/// returns — a caller that wants to keep it past the block must
/// CGImageRetain it.
- (void)captureStillCGImage:(void (^)(CGImageRef image, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCIImage:^(CIImage *image, NSError *error) {
        CGImageRef theRenderedImage = NULL;
        if (image != NULL)
            {
            // TODO: pass real rendering options (color space, etc.).
            CIContext *theContext = [CIContext contextWithOptions:@{}];
            theRenderedImage = [theContext createCGImage:image fromRect:image.extent];
            }

        inCompletionBlock(theRenderedImage, error);

        // Balance the +1 from createCGImage:fromRect:.
        CGImageRelease(theRenderedImage);
        }];
}

/// Captures one still frame as a UIImage.
/// Now routes through the CGImage path: the original used
/// +[UIImage imageWithCIImage:], which yields a UIImage with no backing
/// CGImage — UIImagePNGRepresentation/UIImageJPEGRepresentation return nil for
/// such images and several display paths fail. Rendering via CIContext first
/// produces a fully usable bitmap-backed UIImage.
- (void)captureStillUIImage:(void (^)(UIImage *image, NSError *error))inCompletionBlock
{
    NSParameterAssert(inCompletionBlock != NULL);

    [self captureStillCGImage:^(CGImageRef image, NSError *error) {
        UIImage *theUIImage = nil;
        if (image != NULL)
            {
            // imageWithCGImage: retains the bitmap, so it stays valid after
            // captureStillCGImage: releases its reference.
            theUIImage = [UIImage imageWithCGImage:image];
            }
        inCompletionBlock(theUIImage, error);
        }];
}

@end

1 个答案:

答案 0 :(得分:0)

对不起,

刚刚找到了一个很好的小教程;它虽然没有直接回答这个问题,但达到了同样的目的。

Objective-C 的 AVFoundation 基础教程,在 Xcode 6.3 + Yosemite + iOS 8.3 上运行良好:https://www.youtube.com/watch?v=iMafWBVtmTs