I'm using the following code to generate a video from an array of images plus a pre-recorded audio file:
- (void)viewDidLoad
{
    [super viewDidLoad];
    imagearray = [[NSMutableArray alloc] initWithObjects:@"Quiz pic1.jpg", @"Quiz pic2.jpg", @"Quiz pic3.jpg", @"Quiz pic6.jpg", @"Quiz pic7.jpg", nil];
    image1array = [[NSMutableArray alloc] init];
    for (int i = 0; i < [imagearray count]; i++)
    {
        UIImage *aimage = [UIImage imageNamed:[imagearray objectAtIndex:i]];
        [image1array addObject:aimage];
    }
    NSLog(@"%@", image1array);
    ImageVideoPath = @"/Users/image/Library/Application Support/iPhone Simulator/4.3/Applications/6CC91208-5819-4BFF-B868-6605887861EB/Output";
    FinalVideoPath = @"/Users/image/Library/Application Support/iPhone Simulator/4.3/Applications/6CC91208-5819-4BFF-B868-6605887861EB/VideoOutput";
    CGSize size;
    UIImage *image = [UIImage imageNamed:[imagearray objectAtIndex:0]];
    size = image.size;
    NSString *audioFilePath = nil; // assigned inside writeImageAndAudioAsMovie
    int duration = 10;
    //[self pixelBufferFromCGImage:[[image1array objectAtIndex:0] CGImage]];
    [self writeImageAndAudioAsMovie:image andAudio:audioFilePath duration:duration];
    //[self pixelBufferFromCGImage:[image CGImage] andSize:size];
}
- (void)writeImageAndAudioAsMovie:(UIImage *)image andAudio:(NSString *)audioFilePath duration:(int)duration {
    NSLog(@"start make movie: length:%d", duration);
    // Remove any stale output before the writer opens the file.
    if ([[NSFileManager defaultManager] fileExistsAtPath:ImageVideoPath])
        [[NSFileManager defaultManager] removeItemAtPath:ImageVideoPath error:nil];
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:ImageVideoPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:image.size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:image.size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    writerInput.expectsMediaDataInRealTime = YES;
    [videoWriter setShouldOptimizeForNetworkUse:YES];
    [videoWriter addInput:writerInput];
    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    //Write samples:
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    //Finish the session:
    [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 1)];
    [writerInput markAsFinished];
    [videoWriter finishWriting];
    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [writerInput release];
    audioFilePath = [[NSBundle mainBundle] pathForResource:@"Video" ofType:@"mp3"];
    NSLog(@"%@", audioFilePath);
    [self addAudioToFileAtPath:ImageVideoPath andAudioPath:audioFilePath];
}
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual row stride; it may be padded beyond 4*width.
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
- (void)addAudioToFileAtPath:(NSString *)videoPath andAudioPath:(NSString *)audioFilePath {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    NSLog(@"%@ %@", ImageVideoPath, audioFilePath);
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audioFilePath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:ImageVideoPath];
    NSLog(@"%@", video_inputFileUrl);
    NSString *outputFilePath = FinalVideoPath;
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    NSLog(@"asset:%@", videoAsset);
    NSArray *tracks1 = [videoAsset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks1 count] > 0)
    {
        //CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
        AVAssetTrack *videoAssetTrack = [tracks1 objectAtIndex:0];
        AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [a_compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
    }
    NSArray *tracks = [audioAsset tracksWithMediaType:AVMediaTypeAudio];
    if ([tracks count] > 0)
    {
        AVAssetTrack *audioAssetTrack = [tracks objectAtIndex:0];
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
        //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
        [audioAsset release]; audioAsset = nil;
    }
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeMPEG4;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status)
        {
            case AVAssetExportSessionStatusCompleted:
                //export complete
                NSLog(@"Export Complete");
                break;
            case AVAssetExportSessionStatusFailed:
                //export error (see exportSession.error)
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                //export cancelled
                NSLog(@"Export Cancelled");
                break;
        }
    }];
}
I can see that a video file does get created by the writeImageAndAudioAsMovie
method, but no player on my machine can play it...
What am I missing? Any suggestions, please.
Answer 0 (score: 0)
The problem is in two places:
1. The path you supply should point somewhere you're actually allowed to write, such as the Documents directory. It must also carry the proper extension; when creating a HighestQuality video that should be .mov.
2. The outputFileType you supply must match the extension and the preset type, so in your case it should be _assetExport.outputFileType = AVFileTypeQuickTimeMovie;.
Try these changes.
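As a rough illustration of point 1, here is a minimal sketch of building writable output paths in the app's Documents directory; the file names are hypothetical, and ImageVideoPath/FinalVideoPath are assumed to be plain NSString ivars as in the question (hence the retain under manual reference counting):

    NSString *documentsDir = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    // Hypothetical file names; the .mov extension matches AVFileTypeQuickTimeMovie.
    ImageVideoPath = [[documentsDir stringByAppendingPathComponent:@"ImageVideo.mov"] retain];
    FinalVideoPath = [[documentsDir stringByAppendingPathComponent:@"FinalVideo.mov"] retain];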
Update:
To get rid of the crash, you need to replace the AVAssetTrack code in your addAudioToFileAtPath method with the following:
NSArray *tracks = [videoAsset tracksWithMediaType:AVMediaTypeAudio];
if ([tracks count] > 0)
{
    AVAssetTrack *audioAssetTrack = [tracks objectAtIndex:0];
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                   preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);
    [audioAsset release]; audioAsset = nil;
}
About video types:
Facebook supports QuickTime video (mov/qt); see http://www.facebook.com/help/?faq=218673814818907
To support other kinds of video you need to change the presetName and the output file extension when creating the AVAssetExportSession object; read the documentation for details.
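As a small sketch of keeping preset, outputFileType and extension consistent, you can ask the session which file types the chosen preset actually supports (mixComposition is assumed to be the composition built earlier; release the session when done under MRC):

    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                     presetName:AVAssetExportPresetHighestQuality];
    if ([session.supportedFileTypes containsObject:AVFileTypeQuickTimeMovie]) {
        session.outputFileType = AVFileTypeQuickTimeMovie;   // pair this with a .mov output path
    }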
Update 1:
Here we walk through every image and append it to the adaptor with its presentation time; I've divided the total duration evenly across the images:
for (int i = 0; i < [image1array count]; i++)
{
    int time = (int)i * (duration / [image1array count]);
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[image1array objectAtIndex:i] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(time, 1)];
}
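Note that duration / [image1array count] is integer division, so any remainder is silently truncated, and each buffer created above is never released. A minimal variant under the same assumptions (adaptor, duration and image1array as above) that sidesteps both by expressing times at a finer CMTime timescale:

    int32_t timescale = 600;   // conventional fine-grained timescale
    int64_t frameDuration = (int64_t)duration * timescale / [image1array count];
    for (int i = 0; i < [image1array count]; i++)
    {
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[image1array objectAtIndex:i] CGImage]];
        [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(i * frameDuration, timescale)];
        CVPixelBufferRelease(buffer);   // we created the buffer, so we release it
    }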
Update 2:
Here is the code with a few changes I made around writing the mixed asset:
- (void)addAudioToFileAtPath:(NSString *)videoPath andAudioPath:(NSString *)audioFilePath {
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audioFilePath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:ImageVideoPath];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:FinalVideoPath];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVAsset *audioAsset = [AVURLAsset URLAssetWithURL:audio_inputFileUrl options:nil];
    AVAsset *videoAsset = [AVURLAsset URLAssetWithURL:video_inputFileUrl options:nil];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    BOOL ok = NO;
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVAssetTrack *sourceVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    ok = [compositionVideoTrack insertTimeRange:video_timeRange ofTrack:sourceVideoTrack atTime:kCMTimeZero error:&error];
    if (!ok) {
        // Deal with the error.
        NSLog(@"Error : %@ : %lld", error, videoAsset.duration.value);
    }
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVAssetTrack *sourceAudioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    ok = [compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:sourceAudioTrack atTime:kCMTimeZero error:&error];
    if (!ok) {
        // Deal with the error.
        NSLog(@"Error : %@ : %lld", error, audioAsset.duration.value);
    }
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status)
        {
            case AVAssetExportSessionStatusCompleted:
                //export complete
                NSLog(@"Export Complete");
                break;
            case AVAssetExportSessionStatusFailed:
                //export error (see exportSession.error)
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                //export cancelled
                NSLog(@"Export Cancelled");
                break;
        }
        NSLog(@"Error : %@", _assetExport.error);
    }];
}
Thanks,
Answer 1 (score: 0)
One thing that caught my eye: you call CVPixelBufferPoolRelease(adaptor.pixelBufferPool); in the writeImageAndAudioAsMovie:andAudio:duration: method, but since you didn't create adaptor.pixelBufferPool, you don't own it and therefore shouldn't release it, right? Seems suspect to me.
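In Core Foundation terms this is the create/copy ownership rule: you release only what you created or copied. A minimal sketch of how that applies to the code above:

    // pixelBufferFromCGImage calls CVPixelBufferCreate, so the caller owns the buffer (+1):
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVPixelBufferRelease(buffer);                          // correct: balances the Create
    // adaptor.pixelBufferPool is only read here, never created by us, so it is not ours to release:
    // CVPixelBufferPoolRelease(adaptor.pixelBufferPool);  // wrong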