UIImage to CVPixelBufferRef is always nil

Time: 2016-04-28 22:28:40

Tags: ios objective-c uiimage

I am using this code to create a CVPixelBufferRef:

NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
                                    AVVideoWidthKey: [NSNumber numberWithInt:size.width],
                                    AVVideoHeightKey: [NSNumber numberWithInt:size.height]};

self.writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                         outputSettings:videoSettings];

self.adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.writerInput sourcePixelBufferAttributes:nil];

CVPixelBufferRef buffer;
CVPixelBufferPoolCreatePixelBuffer(NULL, self.adaptor.pixelBufferPool, &buffer);
buffer = [self pixelBufferFromCGImage:[frame CGImage] size:self.videoSize];

Here is the pixelBufferFromCGImage function:

+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
                                  size:(CGSize)imageSize
{
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width,
                                          imageSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width,
                                             imageSize.height, 8, 4*imageSize.width, rgbColorSpace,
                                             kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0 + (imageSize.width-CGImageGetWidth(image))/2,
                                       (imageSize.height-CGImageGetHeight(image))/2,
                                       CGImageGetWidth(image),
                                       CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

The problem is that buffer is always nil. Any idea why this happens?

1 Answer:

Answer (score: 2)

I suspect the buffer is always nil because "videoSize" is invalid. I tested your code in three scenarios; here are the results.

First, I passed valid parameters with an image size of 200x200. The buffer was not nil.

Second, I passed a nil image and a video size of 200x200. The buffer was still not nil.

Finally, I passed a valid image but a video size of 0x0. The buffer was nil, and the return status was:

kCVReturnInvalidArgument
Invalid function parameter. For example, out of range or the wrong type.

Value: -6661
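
Note that NSParameterAssert is compiled out of release builds, so in that configuration the function would silently continue with a NULL buffer instead of stopping. Below is a minimal sketch (not part of the original answer; the function name is mine) of an early-exit variant that reports the status instead of asserting:

#import <UIKit/UIKit.h>       // NSStringFromCGSize
#import <CoreVideo/CoreVideo.h>

// Sketch only: refuse invalid sizes and surface the CVReturn status.
static CVPixelBufferRef CreatePixelBufferChecked(CGSize size)
{
    // Guard against the 0x0 case that produces kCVReturnInvalidArgument.
    if (size.width < 1.0 || size.height < 1.0) {
        NSLog(@"Refusing to create a pixel buffer for size %@", NSStringFromCGSize(size));
        return NULL;
    }

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          (size_t)size.width,
                                          (size_t)size.height,
                                          kCVPixelFormatType_32ARGB,
                                          NULL,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        // -6661 is kCVReturnInvalidArgument.
        NSLog(@"CVPixelBufferCreate failed with status %d", (int)status);
        return NULL;
    }
    return pxbuffer; // caller owns the buffer and must CVPixelBufferRelease() it
}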

Hope this helps.

Here is the code I tested:

- (void)viewDidLoad
{
    [super viewDidLoad];

    UIImage *image = [UIImage imageNamed:@"Doge-Meme.jpg"];
    CVPixelBufferRef bufferRef = [[self class] pixelBufferFromCGImage:image.CGImage size:CGSizeMake(200, 200)];
    NSLog(@"## %@", bufferRef);//not nil

    CVPixelBufferRef bufferRef1 = [[self class] pixelBufferFromCGImage:nil size:CGSizeMake(200, 200)];
    NSLog(@"## %@", bufferRef1);//not nil

    CVPixelBufferRef bufferRef2 = [[self class] pixelBufferFromCGImage:image.CGImage size:CGSizeMake(0, 0)];
    NSLog(@"## %@", bufferRef2);//nil
}

+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
                                      size:(CGSize)imageSize
{
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width,
                                          imageSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width,
                                                 imageSize.height, 8, 4*imageSize.width, rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0 + (imageSize.width-CGImageGetWidth(image))/2,
                                           (imageSize.height-CGImageGetHeight(image))/2,
                                           CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
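
As a side note, once videoSize is valid, the first snippet in the question also leaks the buffer obtained from the pool, because buffer is immediately reassigned to the result of pixelBufferFromCGImage. A rough sketch of how the call site might look; `frame`, `self.adaptor`, `self.writerInput`, and `self.videoSize` are the question's own names, while `frameTime` and `frameIndex` are hypothetical values for the presentation time:

// Sketch only: append the converted frame and release the +1 buffer afterwards.
CMTime frameTime = CMTimeMake(frameIndex, 30); // hypothetical frame index at 30 fps
CVPixelBufferRef buffer = [[self class] pixelBufferFromCGImage:[frame CGImage]
                                                           size:self.videoSize];
if (buffer != NULL && self.writerInput.isReadyForMoreMediaData) {
    [self.adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
}
if (buffer != NULL) {
    CVPixelBufferRelease(buffer); // pixelBufferFromCGImage returns a retained buffer
}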