我想将 H.264 流从服务器转换为视频文件,但是当我调用 assetWriter 的 finishWriting
时,Xcode 报告:
Video /var/mobile/Applications/DE4196F1-BB77-4B7D-8C20-7A5D6223C64D/Documents/test.mov cannot be saved to the saved photos album: Error Domain=NSOSStatusErrorDomain Code=-12847 "This movie format is not supported." UserInfo=0x5334830 {NSLocalizedDescription=This movie format is not supported.}"
以下是我的代码: 数据是h.264帧,只有一帧,可能是i帧或p。
/// Wraps one raw H.264 frame (an I- or P-frame, per the question) in a
/// CMSampleBuffer and appends it to the asset writer input.
/// @param data A single encoded H.264 frame.
/// @param tm   Presentation time of the frame, in seconds.
///
/// NOTE(review): CMVideoFormatDescriptionCreate with a NULL extensions
/// dictionary yields a format description with no SPS/PPS parameter sets.
/// AVAssetWriter cannot produce a playable H.264 movie without them, which
/// is the likely cause of the -12847 "movie format is not supported" error.
/// Prefer CMVideoFormatDescriptionCreateFromH264ParameterSets with the
/// SPS/PPS NAL units extracted from the stream — TODO confirm against the
/// incoming bitstream.
- (void)_encodeVideoFrame2:(NSData *)data time:(double)tm
{
    CMBlockBufferRef videoBlockBuffer = NULL;
    CMFormatDescriptionRef videoFormat = NULL;
    CMSampleBufferRef videoSampleBuffer = NULL;
    CMItemCount numberOfSampleTimeEntries = 1;
    CMItemCount numberOfSamples = 1;
    CMVideoFormatDescriptionCreate(kCFAllocatorDefault, kCMVideoCodecType_H264, 320, 240, NULL, &videoFormat);
    OSStatus result;
    // Allocate a block buffer owned by Core Media, then copy the frame bytes in.
    result = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault, NULL, data.length, kCFAllocatorDefault, NULL, 0, data.length, kCMBlockBufferAssureMemoryNowFlag, &videoBlockBuffer);
    result = CMBlockBufferReplaceDataBytes(data.bytes, videoBlockBuffer, 0, data.length);
    // BUG FIX: the first field of CMSampleTimingInfo is `duration`, not the
    // presentation time stamp, so the original `{CMTimeMake(tm*600, 600)}`
    // set the duration and left every frame's PTS at zero. Set the fields
    // explicitly by name instead.
    CMSampleTimingInfo videoSampleTimingInformation = {
        .duration = kCMTimeInvalid,
        .presentationTimeStamp = CMTimeMake(tm * 600, 600),
        .decodeTimeStamp = kCMTimeInvalid,
    };
    size_t sampleSizeArray[1];
    sampleSizeArray[0] = data.length;
    result = CMSampleBufferCreate(kCFAllocatorDefault, videoBlockBuffer, TRUE, NULL, NULL, videoFormat, numberOfSamples, numberOfSampleTimeEntries, &videoSampleTimingInformation, 1, sampleSizeArray, &videoSampleBuffer);
    result = CMSampleBufferMakeDataReady(videoSampleBuffer);
    [assetWriterInput appendSampleBuffer:videoSampleBuffer];
    // BUG FIX: objects obtained from Create-rule CF/CM functions must be
    // released explicitly (ARC does not manage them); the originals leaked
    // one block buffer, one format description, and one sample buffer per frame.
    if (videoSampleBuffer) CFRelease(videoSampleBuffer);
    if (videoBlockBuffer) CFRelease(videoBlockBuffer);
    if (videoFormat) CFRelease(videoFormat);
}
也许CMSampleBufferCreate
参数错了?谢谢。
答案 0 :(得分:3)
/// Builds Documents/test_output.mp4 from the JPGs bundled with the app
/// (one image every `numberOfSecondsPerFrame` seconds), then muxes the
/// bundled 30secs.mp3 on top of it and exports the combined result to
/// Documents/final_video.mp4 as a QuickTime movie.
///
/// This could be refactored into a reusable method, e.g.:
/// - (void)exportImages:(NSArray *)imageArray
///        asVideoToPath:(NSString *)videoOutputPath
///        withFrameSize:(CGSize)imageSize
///      framesPerSecond:(NSUInteger)fps;
- (IBAction)createVideo:(id)sender {
    NSError *error = nil;

    // Resolve Documents/test_output.mp4 and remove any stale copy.
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    // BUG FIX: never compare a BOOL to YES; test it directly.
    if (![fileMgr removeItemAtPath:videoOutputPath error:&error])
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    CGSize imageSize = CGSizeMake(400, 200);
    NSUInteger fps = 30;

    // BUG FIX: the collection variables were declared without '*'
    // (`NSMutableArray imageArray;` / `NSArray imagePaths`), which does
    // not compile — Objective-C objects are always pointers.
    NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"jpg" inDirectory:nil];
    NSMutableArray *imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
    for (NSString *path in imagePaths) {
        [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
    }
    // BUG FIX: the count was logged before the array was filled (always 0);
    // also %i is wrong for NSUInteger — use %lu with a cast.
    NSLog(@"-->imageArray.count= %lu", (unsigned long)imageArray.count);

    NSLog(@"Start building video from defined frames.");
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    // BUG FIX: this is offline (non-realtime) writing. With
    // expectsMediaDataInRealTime = YES the writer is allowed to drop data
    // instead of throttling; NO is correct when feeding frames from a loop.
    videoWriterInput.expectsMediaDataInRealTime = NO;
    [videoWriter addInput:videoWriterInput];

    // Start the writing session at t = 0.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;
    // Ticks per frame at the `fps` timescale: 30 * 6 = 180 ticks = 6 s/frame.
    double frameDuration = fps * numberOfSecondsPerFrame;

    NSLog(@"****************************");
    for (UIImage *img in imageArray) {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];
        BOOL append_ok = NO;
        int j = 0;
        // Retry up to 30 times (0.1 s apart) while the input drains.
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[imageArray count]);
                CMTime frameTime = CMTimeMake(frameCount * frameDuration, (int32_t)fps);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *writeError = videoWriter.error;
                    if (writeError != nil) {
                        NSLog(@"Unresolved error %@,%@.", writeError, [writeError userInfo]);
                    }
                }
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            // BUG FIX: the newline was embedded mid-sentence in the original format string.
            printf("error appending image %d times %d, with error.\n", frameCount, j);
        }
        // BUG FIX: the pixel buffer was leaked once per frame. Assumes
        // -pixelBufferFromCGImage: follows the Create rule and returns a
        // +1 reference — TODO confirm against its implementation.
        if (buffer) {
            CVPixelBufferRelease(buffer);
            buffer = NULL;
        }
        frameCount++;
    }
    NSLog(@"****************************");

    // Finish the session. -finishWriting is synchronous (deprecated since
    // iOS 6); the muxing below relies on the file being finalized, so if you
    // migrate to -finishWritingWithCompletionHandler:, move everything from
    // here down into that completion handler.
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"Write Ended");

    // ---- Mux a bundled audio file onto the freshly written video ----
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];
    // The video file that was just written above (full path in videoOutputPath).
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:nextClipStartTime
                                       error:nil];

    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange
                                     ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                      atTime:nextClipStartTime
                                       error:nil];

    // Passthrough preset: remux video + audio with no re-encode.
    __block AVAssetExportSession *_assetExport =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetPassthrough];
    NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
    // Use the framework constant instead of the raw UTI string "com.apple.quicktime-movie".
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    NSLog(@"support file types= %@", [_assetExport supportedFileTypes]);
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^{
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"completed!!!");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Failed:%@", _assetExport.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Canceled:%@", _assetExport.error);
                break;
            default:
                break;
        }
    }];
    // NOTE: the export runs asynchronously — this log fires before the
    // final file actually exists at outputFilePath.
    NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
}
答案 1 :(得分:1)
为了将已压缩的 H.264 缓冲区与 AVAssetWriter 进行复用(直通写入,而不重新编码),您必须为 AVAssetWriterInput
指定 outputSettings,这与文档字面所述的内容相反。
所以,不要把 outputSettings 传成 nil,你应该这样做:
// Create the writer input with a sourceFormatHint built from the stream's
// CMFormatDescription; the hint is what lets AVAssetWriterInput accept
// already-compressed sample buffers.
// NOTE(review): Apple's documentation states that passthrough of compressed
// samples requires outputSettings:nil together with sourceFormatHint:, yet
// this answer passes a settings dictionary containing AVVideoCodecKey —
// TODO confirm which combination actually avoids re-encoding on the target OS.
videoInput = [[AVAssetWriterInput alloc]
initWithMediaType:AVMediaTypeVideo outputSettings:@{
AVVideoCodecKey: AVVideoCodecH264,
}
sourceFormatHint:myVideoFormat
];
这将让您传输视频或音频数据,而无需AVAssetWriterInput尝试为您编码/转码任何内容。