I'm using the code below to add an image overlay to a video and then export the newly generated video to the documents directory. But strangely, the exported video comes out rotated 90 degrees.
- (void)buildTransitionComposition:(AVMutableComposition *)composition andVideoComposition:(AVMutableVideoComposition *)videoComposition
{
    CMTime nextClipStartTime = kCMTimeZero;
    NSInteger i;

    // Make transitionDuration no greater than half the shortest clip duration.
    CMTime transitionDuration = self.transitionDuration;
    for (i = 0; i < [_clips count]; i++) {
        NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
        if (clipTimeRange) {
            CMTime halfClipDuration = [clipTimeRange CMTimeRangeValue].duration;
            halfClipDuration.timescale *= 2; // You can halve a rational by doubling its denominator.
            transitionDuration = CMTimeMinimum(transitionDuration, halfClipDuration);
        }
    }

    // Add two video tracks and two audio tracks.
    AVMutableCompositionTrack *compositionVideoTracks[2];
    AVMutableCompositionTrack *compositionAudioTracks[2];
    compositionVideoTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionVideoTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionAudioTracks[0] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    compositionAudioTracks[1] = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);
    CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [_clips count]);

    // Place clips into alternating video & audio tracks in the composition, overlapped by transitionDuration.
    for (i = 0; i < [_clips count]; i++) {
        NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
        AVURLAsset *asset = [_clips objectAtIndex:i];
        NSValue *clipTimeRange = [_clipTimeRanges objectAtIndex:i];
        CMTimeRange timeRangeInAsset;
        if (clipTimeRange)
            timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
        else
            timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

        AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
        /*
        CGAffineTransform t = clipVideoTrack.preferredTransform;
        NSLog(@"Transform1 : %@", t);
        */
        AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];

        // Remember the time range in which this clip should pass through.
        // Every clip after the first begins with a transition.
        // Every clip before the last ends with a transition.
        // Exclude those transitions from the pass-through time ranges.
        passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
        if (i > 0) {
            passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
            passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
        }
        if (i + 1 < [_clips count]) {
            passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
        }

        // The end of this clip will overlap the start of the next by transitionDuration.
        // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
        nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
        nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

        // Remember the time range for the transition to the next item.
        transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
    }

    // Set up the video composition if we are to perform crossfade or push transitions between clips.
    NSMutableArray *instructions = [NSMutableArray array];

    // Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
    for (i = 0; i < [_clips count]; i++) {
        NSInteger alternatingIndex = i % 2; // alternating targets

        // Pass through clip i.
        AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        passThroughInstruction.timeRange = passThroughTimeRanges[i];
        AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
        /*
        CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI_2);
        CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform, 320, 0);
        [passThroughLayer setTransform:rotateTranslate atTime:kCMTimeZero];
        */
        passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
        [instructions addObject:passThroughInstruction];

        if (i + 1 < [_clips count]) {
            // Add a transition from clip i to clip i+1.
            AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
            transitionInstruction.timeRange = transitionTimeRanges[i];
            AVMutableVideoCompositionLayerInstruction *fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
            AVMutableVideoCompositionLayerInstruction *toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1 - alternatingIndex]];
            if (self.transitionType == SimpleEditorTransitionTypeCrossFade) {
                // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
                [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
            }
            else if (self.transitionType == SimpleEditorTransitionTypePush) {
                // Set a transform ramp on fromLayer from identity to all the way left of the screen.
                [fromLayer setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeTranslation(-composition.naturalSize.width, 0.0) timeRange:transitionTimeRanges[i]];
                // Set a transform ramp on toLayer from all the way right of the screen to identity.
                [toLayer setTransformRampFromStartTransform:CGAffineTransformMakeTranslation(+composition.naturalSize.width, 0.0) toEndTransform:CGAffineTransformIdentity timeRange:transitionTimeRanges[i]];
            }
            transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
            [instructions addObject:transitionInstruction];
        }
    }

    videoComposition.instructions = instructions;
}
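For context, the image overlay itself is attached after the composition is built, roughly like this (a sketch of my overlay step, not the exact code — overlayImage and the frame values are placeholders):

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoComposition.renderSize.width, videoComposition.renderSize.height);
videoLayer.frame = parentLayer.frame;
[parentLayer addSublayer:videoLayer];

// The overlay sits in a layer above the video layer.
CALayer *overlayLayer = [CALayer layer];
overlayLayer.contents = (__bridge id)overlayImage.CGImage; // overlayImage: a UIImage placeholder
overlayLayer.frame = CGRectMake(20.0, 20.0, 120.0, 120.0);  // placeholder position/size
[parentLayer addSublayer:overlayLayer];

videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];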
Please help — I can't get portrait videos to export in the correct orientation. Any help is appreciated. Thanks.
Answer 0 (score: 15)
By default, when you export video through an AVMutableComposition with AVAssetExportSession, it comes out rotated away from its original orientation: the source track's preferredTransform is not carried over for you. You have to apply that transform yourself to get the exact orientation. You can try the code below to do the same.
- (AVMutableVideoCompositionLayerInstruction *)layerInstructionAfterFixingOrientationForAsset:(AVAsset *)inAsset
                                                                                     forTrack:(AVMutableCompositionTrack *)inTrack
                                                                                       atTime:(CMTime)inTime
{
    // FIXING ORIENTATION //
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:inTrack];
    AVAssetTrack *videoAssetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
    BOOL isVideoAssetPortrait_ = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;

    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationRight;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationLeft;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
        videoAssetOrientation_ = UIImageOrientationUp;
    }
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
        videoAssetOrientation_ = UIImageOrientationDown;
    }

    CGFloat FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.width;
    if (isVideoAssetPortrait_) {
        FirstAssetScaleToFitRatio = 320.0 / videoAssetTrack.naturalSize.height;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [videolayerInstruction setTransform:CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
    } else {
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [videolayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(videoAssetTrack.preferredTransform, FirstAssetScaleFactor), CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
    }

    [videolayerInstruction setOpacity:0.0 atTime:inTime];
    return videolayerInstruction;
}
I hope this will help you.
AVAssetTrack *assetTrack = [[inAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableCompositionTrack *mutableTrack = [mergeComposition mutableTrackCompatibleWithTrack:assetTrack];
AVMutableVideoCompositionLayerInstruction *assetInstruction = [self layerInstructionAfterFixingOrientationForAsset:inAsset forTrack:mutableTrack atTime:videoTotalDuration];
The above is the code that calls the method, where inAsset is your video asset, videoTotalDuration is the total duration of the video as a CMTime, and mergeComposition is an object of the AVMutableComposition class.
Hope this helps.
Edit: This is not a callback method or an event; you have to call it yourself with the required parameters mentioned above.
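For completeness, here is a minimal sketch of plugging the returned layer instruction into a video composition. Only mergeComposition, assetInstruction, and videoTotalDuration come from the snippet above; the frame duration and render size are assumptions you should replace with your own values.

AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoTotalDuration);
mainInstruction.layerInstructions = @[assetInstruction];

AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = @[mainInstruction];
videoComposition.frameDuration = CMTimeMake(1, 30);       // assumed 30 fps
videoComposition.renderSize = CGSizeMake(320.0, 480.0);   // assumed; matches the 320-point scaling above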
Answer 1 (score: 14)
If you simply want to preserve the original rotation, here's a slightly simpler approach.
// Grab the source track from an AVURLAsset, for example.
AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].lastObject;

// Grab the composition video track from the AVMutableComposition you already made.
AVMutableCompositionTrack *compositionVideoTrack = [composition tracksWithMediaType:AVMediaTypeVideo].lastObject;

// Apply the original transform.
if (assetVideoTrack && compositionVideoTrack) {
    [compositionVideoTrack setPreferredTransform:assetVideoTrack.preferredTransform];
}

// Export...
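The elided export step might look something like this (a sketch — the output location and preset are assumptions; composition is the AVMutableComposition from above):

NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"export.mov"]]; // assumed location
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = outputURL;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    if (exporter.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"Export finished: %@", outputURL);
    } else {
        NSLog(@"Export failed: %@", exporter.error);
    }
}];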
Answer 2 (score: 4)
Use the following method to build an AVMutableVideoComposition that accounts for the video's orientation.
-(AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];

    CGSize videoSize = videoTrack.naturalSize;
    BOOL isPortrait_ = [self isVideoPortrait:asset];
    if (isPortrait_) {
        NSLog(@"video is portrait");
        // Swap width and height so the render size matches the upright video.
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    composition.naturalSize = videoSize;
    videoComposition.renderSize = videoSize;
    videoComposition.frameDuration = CMTimeMakeWithSeconds(1 / videoTrack.nominalFrameRate, 600);

    AVMutableCompositionTrack *compositionVideoTrack;
    compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionLayerInstruction *layerInst;
    layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    [layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = [NSArray arrayWithObject:layerInst];
    videoComposition.instructions = [NSArray arrayWithObject:inst];

    return videoComposition;
}
-(BOOL)isVideoPortrait:(AVAsset *)asset
{
    BOOL isPortrait = FALSE;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // PortraitUpsideDown
        if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // LandscapeRight
        if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            isPortrait = NO;
        }
        // LandscapeLeft
        if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            isPortrait = NO;
        }
    }
    return isPortrait;
}
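A usage sketch, under the assumption that you export the original asset directly — the layer instruction above references the asset's own video track, so the video composition can be handed to an export session created from that asset (videoURL is a placeholder):

AVAsset *asset = [AVAsset assetWithURL:videoURL]; // videoURL: placeholder for your file URL
AVMutableVideoComposition *videoComposition = [self getVideoComposition:asset];

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
exporter.videoComposition = videoComposition; // applies the orientation fix on export
exporter.outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"oriented.mov"]];
exporter.outputFileType = AVFileTypeQuickTimeMovie;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"status: %ld, error: %@", (long)exporter.status, exporter.error);
}];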
Answer 3 (score: 2)
The Swift answer... this worked for me:
var assetVideoTrack = (sourceAsset.tracksWithMediaType(AVMediaTypeVideo)).last as! AVAssetTrack
var compositionVideoTrack = (composition.tracksWithMediaType(AVMediaTypeVideo)).last as! AVMutableCompositionTrack
if assetVideoTrack.playable && compositionVideoTrack.playable {
    compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform
}
Answer 4 (score: 2)
Swift 2:
do {
    let paths = NSSearchPathForDirectoriesInDomains(NSSearchPathDirectory.DocumentDirectory, NSSearchPathDomainMask.UserDomainMask, true)
    let documentsDirectory: AnyObject = paths[0]
    // this will be changed to accommodate dynamic videos
    let dataPath = documentsDirectory.stringByAppendingPathComponent(videoFileName + ".MOV")
    let videoAsset = AVURLAsset(URL: NSURL(fileURLWithPath: dataPath), options: nil)
    let imgGenerator = AVAssetImageGenerator(asset: videoAsset)
    imgGenerator.appliesPreferredTrackTransform = true
    let cgImage = try imgGenerator.copyCGImageAtTime(CMTimeMake(0, 1), actualTime: nil)
    let uiImage = UIImage(CGImage: cgImage)
    videoThumb.image = uiImage
} catch let err as NSError {
    print("Error generating thumbnail: \(err)")
}
Answer 5 (score: 2)
Here is Swift 4 code that merges videos with the correct orientation.
let mainComposition = AVMutableComposition()
let compositionVideoTrack = mainComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
let soundtrackTrack = mainComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)

var insertTime = kCMTimeZero
for videoAsset in arrayVideos {
    try! compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: insertTime)

    // Copy the source track's preferredTransform so the orientation survives the merge.
    let assetVideoTrack = videoAsset.tracks(withMediaType: .video).last!
    let compositionVideoTrack = mainComposition.tracks(withMediaType: .video).last!
    compositionVideoTrack.preferredTransform = assetVideoTrack.preferredTransform

    try! soundtrackTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: videoAsset.tracks(withMediaType: .audio)[0], at: insertTime)
    insertTime = CMTimeAdd(insertTime, videoAsset.duration)
}

let outputFileURL = URL(fileURLWithPath: NSTemporaryDirectory() + "merge1uupo.MOV")
let fileManager = FileManager()
//fileManager.removeItemIfExisted(outputFileURL)

let exporter = AVAssetExportSession(asset: mainComposition, presetName: AVAssetExportPresetHighestQuality)
exporter?.outputURL = outputFileURL
exporter?.outputFileType = AVFileType.mov
exporter?.shouldOptimizeForNetworkUse = true
exporter?.exportAsynchronously {
    DispatchQueue.main.async {
        // Do what you want at the end
    }
}