Converting images to a video on iOS 5

Posted: 2012-06-22 02:02:03

Tags: objective-c ios video-capture avassetwriter

I'm writing an app that draws on the screen and then converts a series of screen-grabbed images into a video. However, the final video comes out noticeably off (it looks as if it has been rotated).

Basically, the captured video should look like this (screenshot: in the Simulator):


However, the final video ends up looking like this (screenshot: recorded video):

My code is shown below. I'm guessing the problem is somewhere in the pixel-buffer logic and that I need to apply some kind of transform. Any suggestions?

- (void)exportImages:(NSMutableArray *)imageArray 
   asVideoToPath:(NSString *)path 
   withFrameSize:(CGSize)imageSize
 framesPerSecond:(NSUInteger)fps {

NSFileManager *fileManager = [[NSFileManager alloc] init];

if([fileManager fileExistsAtPath:path]) 
    [fileManager removeItemAtPath:path error:NULL];

NSLog(@"Start building video from defined frames.");

NSError *error = nil;

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                              [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];    
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//convert uiimage to CGImage.
int frameCount = 0;

for(VideoFrame * frm in imageArray) {
    UIImage * img = frm._imageFrame;
    buffer = [self pixelBufferFromCGImage:[img CGImage]];

    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData)  {
            //print out status::
            NSString *border = @"**************************************************";
            NSLog(@"\n%@\nProcessing video frame (%d,%d).\n%@",border,frameCount,[imageArray count],border);

            CMTime frameTime = CMTimeMake(frameCount,(int32_t) fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if(!append_ok){
                NSError *error = videoWriter.error;
                if(error!=nil) {
                    NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                }
            }

        } 
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    if (!append_ok) {
        printf("Error appending image %d after %d attempts.\n", frameCount, j);
    }
    CVPixelBufferRelease(buffer);   // release the per-frame buffer created above
    frameCount++;
}

//Finish the session:
[videoWriterInput markAsFinished];  
[videoWriter finishWriting];
NSLog(@"Write Ended");

}

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                         nil];
CVPixelBufferRef pxbuffer = NULL;

CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, 
                                      self.frame.size.width,
                                      self.frame.size.height, 
                                      kCVPixelFormatType_32ARGB, 
                                      (__bridge CFDictionaryRef) options, 
                                      &pxbuffer);
if (status != kCVReturnSuccess){
    NSLog(@"Failed to create pixel buffer");
}

CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, self.frame.size.width,
                                             self.frame.size.height, 8, 4*self.frame.size.width, rgbColorSpace, 
                                             kCGImageAlphaPremultipliedFirst);
                                             //kCGImageAlphaNoneSkipFirst);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0)); // rotation of 0: currently a no-op
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
                                       CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);

CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

return pxbuffer;

}

FYI: I also tried saving the screen-grabbed images to PNG files and inspecting them -- they all look correct.
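
For reference, here is a variant of the pixel-buffer method I'm considering trying. It is only a sketch based on the guess above, not a verified fix: it sizes the buffer from the CGImage itself rather than self.frame, passes the buffer's actual bytes-per-row to CGBitmapContextCreate (row padding would shear every scan line, which could produce the skewed/rotated look above), and flips the context vertically to account for CoreGraphics' bottom-left origin. The method name pixelBufferFromCGImageFlipped: is just a placeholder of mine.

- (CVPixelBufferRef)pixelBufferFromCGImageFlipped:(CGImageRef)image {
    // Size the buffer from the image itself instead of self.frame.
    size_t width  = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        NSLog(@"Failed to create pixel buffer");
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's real row stride; CVPixelBuffer rows can be padded,
    // so assuming 4*width here shears the image when padding is present.
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);

    // Flip vertically so the UIKit-oriented screen grab is not upside down
    // when the buffer is read top-down as a video frame.
    CGContextTranslateCTM(context, 0, height);
    CGContextScaleCTM(context, 1.0, -1.0);

    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

If the row-padding theory is right, the bytes-per-row change alone might be enough and the vertical flip may turn out to be unnecessary, so I plan to test the two changes separately.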

0 Answers:

No answers yet.