I'm having a hard time figuring out why my export contains only the image and the audio, but not the video. Any help would be greatly appreciated. As you can see, I also tried using videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer,
but I get an error when exporting. I don't care which method I end up using (videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer
or videoCompositionCoreAnimationToolWithAdditionalLayer
), I just need it to work, lol. Thanks in advance for your help.
#import "ThirdView.h"
@implementation ThirdView
- (void)viewDidLoad {
[super viewDidLoad];
imagePicker = [[UIImagePickerController alloc] init];
}
- (NSString *) filePath: (NSString *) fileName {
NSArray *paths = NSSearchPathForDirectoriesInDomains(
NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDir = [paths objectAtIndex:0];
return [documentsDir stringByAppendingPathComponent:fileName];
}
- (IBAction) btnClicked: (id) sender{
imagePicker.delegate = self;
imagePicker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
NSArray *mediaTypes =
[NSArray arrayWithObjects: (NSString *) kUTTypeMovie, nil];
imagePicker.mediaTypes = mediaTypes;
//---show the Image Picker--
[self presentModalViewController: imagePicker animated: YES] ;
}
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
/// incoming video
NSURL *videoURL = [info valueForKey:UIImagePickerControllerMediaURL];
/// UIImage into CALayer
CALayer *aLayer = [CALayer layer];
aLayer.contents = (id) [UIImage imageNamed:@"test.png"].CGImage;
aLayer.frame = CGRectMake(0, 0, 480, 320);
/* only use with videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 480, 320);
videoLayer.frame = CGRectMake(0, 0, 480, 320);
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:aLayer];
*/
AVURLAsset* url = [AVURLAsset URLAssetWithURL:videoURL options:nil];
AVMutableComposition *videoComposition = [AVMutableComposition composition];
NSError *error = nil;
NSFileManager *fileManager = [NSFileManager defaultManager];
AVMutableCompositionTrack *compositionVideoTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [videoComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
AVAssetTrack *clipVideoTrack = [[url tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:clipVideoTrack atTime:kCMTimeZero error:&error];
AVAssetTrack *clipAudioTrack = [[url tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
[compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, [url duration]) ofTrack:clipAudioTrack atTime:kCMTimeZero error:&error];
AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition];
videoComp.renderSize = CGSizeMake(480, 320);
videoComp.frameDuration = CMTimeMake(1, 30);
//videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:aLayer inLayer:parentLayer];
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithAdditionalLayer:aLayer asTrackID:2];
/// instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30) );
AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
[layerInstruction setTrackID:2];
[layerInstruction setOpacity:1.0 atTime:kCMTimeZero ];
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
videoComp.instructions = [NSArray arrayWithObject: instruction];
/// outputs
NSString *filePath = nil;
filePath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
filePath = [filePath stringByAppendingPathComponent:@"temp.mov"];
NSLog(@"exporting to: %@", filePath);
if ([fileManager fileExistsAtPath:filePath])
{
BOOL success = [fileManager removeItemAtPath:filePath error:&error];
if (!success) NSLog(@"FM error: %@", [error localizedDescription]);
}
/// exporting
AVAssetExportSession *exporter;
exporter = [[AVAssetExportSession alloc] initWithAsset:videoComposition presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComp;
exporter.outputURL=[NSURL fileURLWithPath:filePath];
exporter.outputFileType=AVFileTypeQuickTimeMovie;
[statusLabel setText:@"processing..."];
[exporter exportAsynchronouslyWithCompletionHandler:^(void){
switch (exporter.status) {
case AVAssetExportSessionStatusFailed:
NSLog(@"exporting failed");
break;
case AVAssetExportSessionStatusCompleted:
NSLog(@"exporting completed");
UISaveVideoAtPathToSavedPhotosAlbum(filePath, self, @selector (video:didFinishSavingWithError:contextInfo:), NULL);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"export cancelled");
break;
}
}];
//---hide the Image Picker---
[picker dismissModalViewControllerAnimated:YES];
[exporter autorelease];
}
- (void) video:(NSString *)videoPath
didFinishSavingWithError: (NSError *) error
contextInfo: (void *) contextInfo {
NSLog(@"Finished saving video with error: %@", error);
}
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
//---user did not select image/video; hide the Image Picker---
[picker dismissModalViewControllerAnimated:YES];
}
- (void)dealloc {
[imagePicker release];
[super dealloc];
}
- (void)didReceiveMemoryWarning {
// Releases the view if it doesn't have a superview.
[super didReceiveMemoryWarning];
// Release any cached data, images, etc that aren't in use.
}
- (void)viewDidUnload {
[super viewDidUnload];
// Release any retained subviews of the main view.
// e.g. self.myOutlet = nil;
}
@end
Answer 0 (score: 3)
You have to provide the audio composition to the exporter by setting
exporter.audioMix
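A minimal sketch of what that could look like, reusing compositionAudioTrack and exporter from the question's code; the volume value and the placement right before the export call are assumptions:
// Hypothetical sketch: build an audio mix for the composition's audio track
// and hand it to the exporter together with the video composition.
AVMutableAudioMixInputParameters *audioParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:compositionAudioTrack];
[audioParams setVolume:1.0 atTime:kCMTimeZero];
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = [NSArray arrayWithObject:audioParams];
exporter.audioMix = audioMix;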
Answer 1 (score: 0)
The problem with this code is that the image and the video both have the same dimensions, 480x320, so you can only see one or the other. In the commented-out code, aLayer is added after videoLayer, so the video is covered and only the image is visible. In case you solved this issue in a different way, please do share it. Thanks.
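A possible fix along those lines, going back to the post-processing variant from the commented-out block and shrinking the overlay so it no longer covers the whole 480x320 frame; overlayLayer is a hypothetical name and its frame values are just an example:
// Hypothetical sketch: put the image in a corner so the video layer underneath stays visible.
CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, 480, 320);
videoLayer.frame = CGRectMake(0, 0, 480, 320);
CALayer *overlayLayer = [CALayer layer];
overlayLayer.contents = (id)[UIImage imageNamed:@"test.png"].CGImage;
overlayLayer.frame = CGRectMake(352, 224, 120, 88); // smaller than the video, placed in a corner
[parentLayer addSublayer:videoLayer];
[parentLayer addSublayer:overlayLayer];
videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
If you go this route, the layer instruction would presumably need to reference the composition's video track rather than the hard-coded track ID 2 used in the question.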