I can't get AVAssetWriterInputPixelBufferAdaptor to export a small video

Asked: 2011-05-26 21:01:30

Tags: objective-c cocoa-touch avfoundation avassetwriter

The following code compiles and runs, but when I try to append a buffer, the AVAssetWriterInputPixelBufferAdaptor returns NO:

Any ideas why?

- (void) performCompression
{
    CGSize size = CGSizeMake(480, 320);

    NSString *compressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];

    [self evaluateTrimmers];

    scrubTime = 0.0;
    currentFrameIndex = 0;  

    NSError *error = nil;

    //----initialize compression engine
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:[compressionDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.m4v",[[NSUserDefaults standardUserDefaults] valueForKey:@"RecentFileName"]]]]
                                                 fileType:AVFileTypeQuickTimeMovie
                                                    error:&error];
    NSParameterAssert(videoWriter);
    if(error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                                            [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                                             [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain]; //[[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];


    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                       [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary] retain];
    //[[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
    //                                              sourcePixelBufferAttributes:[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil]];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie

    CGImageRef theImage = [[UIImage imageNamed:@"Default-Landscape.png"] CGImage];

    CVPixelBufferRef buffer = NULL;
    buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    for (int i = 0; i < 120; i++)
    {       
        if (adaptor.assetWriterInput.readyForMoreMediaData)
        {
            printf("inside for loop %i\n", i);

            CMTime frameTime = CMTimeMake(1, 20);
            CMTime lastTime = CMTimeMake(i, 20);
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);

            buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime])
                NSLog(@"FAIL");
            CVBufferRelease(buffer);
        }
        else {
            printf("error\n");
            i--;
        }
    }

    NSLog(@"outside for loop");

    [writerInput markAsFinished];
    [videoWriter finishWriting];
}

- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
        [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
        [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);  

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
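
When appendPixelBuffer:withPresentationTime: returns NO, AVAssetWriter records the reason itself, so its status and error properties are the first things to check. A minimal diagnostic sketch (assuming the videoWriter and adaptor set up above, dropped in where FAIL is logged):

    // Diagnostic sketch: AVAssetWriter exposes its state and failure reason directly,
    // so log both whenever an append is refused.
    if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
    {
        NSLog(@"append failed, writer status = %d", (int)videoWriter.status);
        NSLog(@"writer error = %@", videoWriter.error);
    }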

1 Answer:

Answer 0 (score: -1)

I got it all working!

The problem I ran into turned out to be an Apple bug: try the same technique, but drive the writes from the requestMediaDataWhenReadyOnQueue:usingBlock: block instead of your own loop:

[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
    while ([writerInput isReadyForMoreMediaData])
    {
        if(++frame >= 120)
        {
            [writerInput markAsFinished];
            [videoWriter finishWriting];
            [videoWriter release];
            break;
        }

        CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
        if (buffer)
        {
            if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                NSLog(@"FAIL");
            else
                NSLog(@"Success:%d", frame);
            CFRelease(buffer);
        }
    }
}];
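
One caveat on the end-of-stream branch: the synchronous -finishWriting used inside the block was deprecated from iOS 6 onward in favor of a completion-handler form. A sketch of that substitution, using the same writerInput/videoWriter as above (an assumption about newer SDKs, not part of the original answer):

    // Hedged sketch for iOS 6+: finish asynchronously instead of calling
    // the deprecated synchronous -finishWriting.
    if (++frame >= 120)
    {
        [writerInput markAsFinished];
        [videoWriter finishWritingWithCompletionHandler:^{
            NSLog(@"finished, status = %d", (int)videoWriter.status);
            [videoWriter release];   // under MRC, balance the alloc once writing completes
        }];
        break;
    }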

Here is a link to the sample code: git@github.com:RudyAramayo/AVAssetWriterInputPixelBufferAdaptorSample.git

Here is the code you need:

- (void) testCompressionSession
{
    CGSize size = CGSizeMake(480, 320);

    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];

    NSError *error = nil;

    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if(error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"i can't add this input");

    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie

    CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];

    dispatch_queue_t    dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block         frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if(++frame >= 120)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (buffer)
            {
                if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success:%d", frame);
                CFRelease(buffer);
            }
        }
    }];

    NSLog(@"outside for loop");
}


- (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
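
The commented-out CVPixelBufferPoolCreatePixelBuffer line hints at a cheaper path: the adaptor vends a pixelBufferPool, but only after -startWriting has been called (before that the pool is nil). A hedged sketch of drawing from that pool instead of CVPixelBufferCreate, assuming the adaptor is reachable from this method:

    // Sketch only: pull a reusable buffer from the adaptor's pool instead of
    // allocating a new one. Assumes `adaptor` is visible here and that
    // [videoWriter startWriting] has already run; otherwise the pool is nil.
    CVPixelBufferRef pooledBuffer = NULL;
    CVReturn poolStatus = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                                             adaptor.pixelBufferPool,
                                                             &pooledBuffer);
    if (poolStatus != kCVReturnSuccess || pooledBuffer == NULL)
        NSLog(@"pool allocation failed: %d", poolStatus);
    // Lock, draw into, and unlock pooledBuffer exactly as above, then append and CFRelease it.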