I have successfully created a video from images using the following code:

- (void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                  error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    // Write samples:
    for (int i = 0; i < [array count]; i++)
    {
        if ([writerInput isReadyForMoreMediaData])
        {
            NSLog(@"inside for loop %d", i);
            CMTime frameTime = CMTimeMake(1, 20);
            CMTime lastTime = CMTimeMake(i, 20);   // i is from 0 to 24 of the loop above
            CMTime presentTime = CMTimeAdd(lastTime, frameTime);

            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
        }
        else
        {
            NSLog(@"error");
            i--;
        }
    }
    NSLog(@"outside for loop");

    // Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}
Here I used CVPixelBufferRef. Instead, I want to use a CVPixelBufferPoolRef together with AVAssetWriterInputPixelBufferAdaptor.

Can anyone provide an example that I can debug and build on?
Answer 0 (score: 14)
You are passing nil as 'sourcePixelBufferAttributes', which is why no pixel buffer pool gets created:
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
Instead, pass some attributes, for example:
NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
Then you can create pixel buffers using the pool, like:
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);
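Putting the two snippets of this answer together, here is a minimal sketch of the overall flow, assuming the writerInput and bufferAttributes shown above; frameIndex and the drawing step are placeholders of mine, not part of the answer:

    // Create the adaptor with attributes so that it owns a pixel buffer pool.
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                         sourcePixelBufferAttributes:bufferAttributes];

    // Note: adaptor.pixelBufferPool is nil until the asset writer has started writing.
    CVPixelBufferRef pixelBuffer = NULL;
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pixelBuffer);
    if (status == kCVReturnSuccess && pixelBuffer != NULL) {
        // ... lock the buffer, draw the CGImage for this frame into it, unlock (see the answers below) ...
        [adaptor appendPixelBuffer:pixelBuffer withPresentationTime:CMTimeMake(frameIndex, 20)];
        CVPixelBufferRelease(pixelBuffer);   // the pool recycles the backing memory
    }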
Answer 1 (score: 6)
Answer 2 (score: 4)
Instead of using the "for" loop, use this code:
dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    int i = 1;
    while (writerInput.readyForMoreMediaData) {
        NSLog(@"inside for loop %d", i);
        CMTime frameTime = CMTimeMake(1, 20);
        CMTime lastTime = CMTimeMake(i, 20);   // i is from 0 to 19 of the loop above
        CMTime presentTime = CMTimeAdd(lastTime, frameTime);

        if (i >= [array count]) {
            buffer = NULL;
        } else {
            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];
        }

        //CVBufferRetain(buffer);

        if (buffer) {
            // append buffer
            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
            i++;
        } else {
            // done!

            // Finish the session:
            [writerInput markAsFinished];
            [videoWriter finishWriting];

            CVPixelBufferPoolRelease(adaptor.pixelBufferPool);

            [videoWriter release];
            [writerInput release];
            NSLog(@"Done");

            [imageArray removeAllObjects];
            break;
        }
    }
}];
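One detail the block above glosses over: each buffer returned by pixelBufferFromCGImage: is an owned (+1) CoreVideo object under manual reference counting. As an assumption on my part rather than something stated in this answer, the append branch would normally also release the buffer and check the return value, roughly:

    if (buffer) {
        // append the frame, then balance the +1 retain from pixelBufferFromCGImage:
        if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
            NSLog(@"appendPixelBuffer failed at frame %d", i);
        CVPixelBufferRelease(buffer);
        i++;
    }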
Answer 3 (score: 4)
I got it all working!

Here is the sample code link: git@github.com:RudyAramayo/AVAssetWriterInputPixelBufferAdaptorSample.git

And here is the code you need:
- (void)testCompressionSession
{
    CGSize size = CGSizeMake(480, 320);

    NSString *betaCompressionDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];

    NSError *error = nil;

    unlink([betaCompressionDirectory UTF8String]);

    //----initialize compression engine
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error)
        NSLog(@"error = %@", [error localizedDescription]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    if ([videoWriter canAddInput:writerInput])
        NSLog(@"I can add this input");
    else
        NSLog(@"i can't add this input");

    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    //---
    // insert demo debugging code to write the same image repeated as a movie

    CGImageRef theImage = [[UIImage imageNamed:@"Lotus.png"] CGImage];

    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;

    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData])
        {
            if (++frame >= 120)
            {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                [videoWriter release];
                break;
            }

            CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:theImage size:size];
            if (buffer)
            {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 20)])
                    NSLog(@"FAIL");
                else
                    NSLog(@"Success:%d", frame);

                CFRelease(buffer);
            }
        }
    }];

    NSLog(@"outside for loop");
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
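The commented-out CVPixelBufferPoolCreatePixelBuffer call inside pixelBufferFromCGImage:size: is the hook for the pool usage asked about in the question. A hedged sketch of a pool-backed variant (the pool: parameter and its name are my additions, not part of this answer) could look like this:

    - (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size pool:(CVPixelBufferPoolRef)pool
    {
        // Take a recycled buffer from the adaptor's pool instead of allocating a fresh one per frame.
        CVPixelBufferRef pxbuffer = NULL;
        CVReturn status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &pxbuffer);
        if (status != kCVReturnSuccess || pxbuffer == NULL)
            return NULL;

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

        // The pool's buffers come preconfigured by the adaptor (format, and typically size),
        // so use the buffer's own geometry when building the bitmap context.
        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context = CGBitmapContextCreate(pxdata,
                                                     CVPixelBufferGetWidth(pxbuffer),
                                                     CVPixelBufferGetHeight(pxbuffer),
                                                     8,
                                                     CVPixelBufferGetBytesPerRow(pxbuffer),
                                                     rgbColorSpace,
                                                     kCGImageAlphaPremultipliedFirst);
        CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        return pxbuffer;   // still an owned reference; release it after appending
    }

It would then be called from the requestMediaDataWhenReadyOnQueue: block as [self pixelBufferFromCGImage:theImage size:size pool:adaptor.pixelBufferPool], assuming the adaptor was created with the 32ARGB attributes shown earlier, and the returned buffer is still released after appending, exactly as the CFRelease(buffer) call above does.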