I'm trying to create a simple AVAsset that contains three movies to be played one after another, but in practice only the first movie plays.
In the code below, the client class calls movieAssetWithBlock to obtain an AVAsset that the client later plays back.
Given the asset built below, I would expect video1.mp4 to play for 5 seconds, followed by video2.mp4 for 5 seconds, and then video3.mp4 (which is not what happens...).
typedef void(^MovieAssetBlock)(AVAsset*);
+ (void)createComposition:(AVMutableComposition *)composition forResource:(NSString *)fileName nextClipStartTime:(CMTime *)nextClipStartTime
{
    NSString *itemPath = [[NSBundle mainBundle] pathForResource:[fileName stringByDeletingPathExtension] ofType:[fileName pathExtension]];
    NSURL *url = [[[NSURL alloc] initFileURLWithPath:itemPath] autorelease];
    NSDictionary *options = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:url options:options];

    CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(5, 1)); // For test - only 5 sec

    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    NSError *error = nil;
    BOOL insertOk = [compositionVideoTrack insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:*nextClipStartTime error:&error];
    assert(insertOk);

    DLog(@"Inserting asset time range [Start: %lld Duration: %lld] at time [%lld]",
         timeRangeInAsset.start.value, timeRangeInAsset.duration.value,
         (*nextClipStartTime).value);

    *nextClipStartTime = CMTimeAdd(*nextClipStartTime, timeRangeInAsset.duration);
}
+ (void)movieAssetWithBlock:(MovieAssetBlock)block
{
    CMTime nextClipStartTime = kCMTimeZero;
    AVMutableComposition *composition = [AVMutableComposition composition];

    [self createComposition:composition forResource:@"video1.mp4" nextClipStartTime:&nextClipStartTime];
    [self createComposition:composition forResource:@"video2.mp4" nextClipStartTime:&nextClipStartTime];
    [self createComposition:composition forResource:@"video3.mp4" nextClipStartTime:&nextClipStartTime];

    block(composition);
}