I am currently developing an iOS app that merges as many videos as the user wants. When the user taps the merge button, the videos are joined and then played back with AVPlayer:
CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
CMTime transitionDuration = CMTimeMake(1, 1); // Default transition duration is one second.
// Add two video tracks and two audio tracks.
AVMutableCompositionTrack *compositionVideoTracks[2];
AVMutableCompositionTrack *compositionAudioTracks[2];
compositionVideoTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionVideoTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);
CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);
// Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
for (i = 0; i < [self.selectedAssets count]; i++ )
{
    NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
    AVURLAsset *asset = [self.selectedAssets objectAtIndex:i];
    NSLog(@"number of tracks %lu", (unsigned long)asset.tracks.count);
    CMTimeRange assetTimeRange;
    assetTimeRange.start = kCMTimeZero;
    assetTimeRange.duration = asset.duration;
    NSValue *clipTimeRange = [NSValue valueWithCMTimeRange:assetTimeRange];
    CMTimeRange timeRangeInAsset;
    if (clipTimeRange)
        timeRangeInAsset = [clipTimeRange CMTimeRangeValue];
    else
        timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:nil];
    AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    [compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:nil];
    // Remember the time range in which this clip should pass through.
    // Every clip after the first begins with a transition.
    // Every clip before the last ends with a transition.
    // Exclude those transitions from the pass through time ranges.
    passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
    if (i > 0) {
        passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
        passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
    }
    if (i+1 < [self.selectedAssets count]) {
        passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
    }
    // The end of this clip will overlap the start of the next by transitionDuration.
    // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
    nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);
    // Remember the time range for the transition to the next item.
    transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
}
// Set up the video composition if we are to perform crossfade or push transitions between clips.
NSMutableArray *instructions = [NSMutableArray array];
// Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
for (i = 0; i < [self.selectedAssets count]; i++ )
{
    NSInteger alternatingIndex = i % 2; // alternating targets
    // Pass through clip i.
    AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = passThroughTimeRanges[i];
    AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
    passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
    [instructions addObject:passThroughInstruction];
    AVMutableVideoCompositionLayerInstruction *fromLayer;
    AVMutableVideoCompositionLayerInstruction *toLayer;
    if (i+1 < [self.selectedAssets count])
    {
        // Add transition from clip i to clip i+1.
        AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        transitionInstruction.timeRange = transitionTimeRanges[i];
        fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
        toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];
        // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
        [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];
        transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
        [instructions addObject:transitionInstruction];
    }
    AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[self.selectedItemsURL objectAtIndex:i] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
    AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
    CGSize size = CGSizeMake(fabsf(temp.width), fabsf(temp.height));
    CGAffineTransform transform = sourceVideoTrack.preferredTransform;
    self.videoComposition.renderSize = sourceVideoTrack.naturalSize;
    if (size.width > size.height) {
        [fromLayer setTransform:transform atTime:sourceAsset.duration];
    } else {
        float s = size.width/size.height;
        CGAffineTransform new = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));
        float x = (size.height - size.width*s)/2;
        CGAffineTransform newer = CGAffineTransformConcat(new, CGAffineTransformMakeTranslation(x, 0));
        [fromLayer setTransform:newer atTime:sourceAsset.duration];
    }
}
self.videoComposition.instructions = instructions;
self.videoComposition.frameDuration = CMTimeMake(1, 30);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]];
NSURL *url = [NSURL fileURLWithPath:myPathDocs];
self.exporter = [[AVAssetExportSession alloc] initWithAsset:self.mixComposition presetName:AVAssetExportPresetMediumQuality];
self.exporter.outputURL=url;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.videoComposition = self.videoComposition;
self.exporter.shouldOptimizeForNetworkUse = YES;
self.playerItem = [AVPlayerItem playerItemWithAsset:self.mixComposition];
self.playerItem.videoComposition = self.videoComposition;
AVPlayer *player = [AVPlayer playerWithPlayerItem:self.playerItem];
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
[playerLayer setFrame:CGRectMake(0, 0, self.imageView.frame.size.width, self.imageView.frame.size.height)];
[[[self imageView] layer] addSublayer:playerLayer];
playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[player play];
[[NSNotificationCenter defaultCenter]
addObserver:self selector:@selector(checkPlayEnded) name:AVPlayerItemDidPlayToEndTimeNotification object:self.playerItem];
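For completeness, the export session configured above is never actually started in this snippet; a minimal sketch of kicking it off could look like the following (the completion handling and the saveMergedVideoToLibrary: helper are assumptions, not part of the original code):

[self.exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (self.exporter.status == AVAssetExportSessionStatusCompleted) {
            // Hypothetical helper that writes the exported file at `url` to the photo library.
            [self saveMergedVideoToLibrary:url];
        } else {
            NSLog(@"Export failed: %@", self.exporter.error);
        }
    });
}];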
I am currently facing the following problems:
If one video is portrait and another is landscape, how do I rotate the portrait video into landscape? My view is in landscape, but the portrait video keeps its original orientation. (I am loading videos stored in the camera roll, not recording them inside my app.)
Ignoring the problem above, merging any number of videos works fine. But once I save the merged video to my library, load it back into my app, and try to join it with some other new videos, the resolution gets disturbed, even though both videos play perfectly well when played individually in the app. How can I fix this?
(I have tried to follow the WWDC 2010 video editing tutorial, and the code above is taken from there.)
Answer 0 (score: 2)
In your code above, at the point where you create the AVMutableVideoCompositionInstruction object, you can check the video's orientation at run time.
The code to append to your code to fix the problem is:
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mutableComposition duration]);
AVAssetTrack *assetVideoTrack = [[mutableComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:assetVideoTrack];
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGSize videoSize;
CGAffineTransform videoTransform = assetVideoTrack.preferredTransform;
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0)
{
    videoAssetOrientation_ = UIImageOrientationRight;
    isVideoAssetPortrait_ = YES;
}
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0)
{
    videoAssetOrientation_ = UIImageOrientationLeft;
    isVideoAssetPortrait_ = YES;
}
CGFloat FirstAssetScaleToFitRatio = 320.0 / assetVideoTrack.naturalSize.width;
if (isVideoAssetPortrait_)
{
    videoSize = CGSizeMake(350, 400);
    FirstAssetScaleToFitRatio = 320.0 / assetVideoTrack.naturalSize.height;
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
    [layerInstruction setTransform:CGAffineTransformConcat(assetVideoTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
else
{
    videoSize = CGSizeMake(assetVideoTrack.naturalSize.width, assetVideoTrack.naturalSize.height);
}
The above code keeps a landscape video in landscape and prevents a portrait video from being converted to landscape.
I hope this helps. Rather than first converting to the correct orientation and then applying your edits, appending this code lets you do both things (the editing and the orientation handling) in a single pass, which saves a step and is faster.
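As a follow-up, the videoSize computed in the snippet above still has to be handed to the video composition, otherwise the render size (and therefore the exported resolution) stays whatever it was before. A minimal sketch of wiring it up, reusing the snippet's variable names plus a hypothetical mutableVideoComposition:

// Attach the layer instruction and apply the computed render size.
instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];

AVMutableVideoComposition *mutableVideoComposition = [AVMutableVideoComposition videoComposition];
mutableVideoComposition.instructions = [NSArray arrayWithObject:instruction];
mutableVideoComposition.frameDuration = CMTimeMake(1, 30);
mutableVideoComposition.renderSize = videoSize; // keeps the output resolution consistent between merges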