I'm concatenating multiple AVURLAssets using the following code:
AVMutableComposition *movie = [AVMutableComposition composition];
CMTime offset = kCMTimeZero;

for (AVURLAsset *asset in assets) {
    AVMutableCompositionTrack *compositionVideoTrack = [movie addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *compositionAudioTrack = [movie addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

    AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

    NSError *error = nil;
    if (![compositionVideoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:offset error:&error]) {
        NSLog(@"Error adding video track - %@", error);
    }
    if (![compositionAudioTrack insertTimeRange:timeRange ofTrack:assetAudioTrack atTime:offset error:&error]) {
        NSLog(@"Error adding audio track - %@", error);
    }
    offset = CMTimeAdd(offset, asset.duration);
}
The resulting composition plays for the combined duration of all the source assets, and the audio plays correctly, but only the video from the first asset plays; after that it freezes on its final frame.
Any ideas what I'm doing wrong?
The ordering of the source assets doesn't matter: whichever asset comes first is the only one whose video plays, while all of the audio plays through.
Answer 0 (score: 1)
You need to create an AVVideoComposition with instances of AVVideoCompositionInstruction. Take a look at this sample code.
The code you're interested in will look something like this:
AVMutableVideoCompositionLayerInstruction *videoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mutableVideoTrack];
[self.layerInstructions addObject:videoCompositionLayerInstruction];
AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
passThroughInstruction.timeRange = trackTimeRange;
passThroughInstruction.layerInstructions = @[videoCompositionLayerInstruction];
[self.compositionInstructions addObject:passThroughInstruction];
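For context, here is a minimal sketch of how those collected instructions might then be attached to a video composition and used for playback. The 30 fps frame duration, the render size, and the AVPlayerItem wiring are my own illustrative assumptions, not part of the linked sample:

// Assemble the video composition from the instructions gathered above.
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.instructions = [self.compositionInstructions copy];
videoComposition.frameDuration = CMTimeMake(1, 30);  // assumption: 30 fps output
videoComposition.renderSize = CGSizeMake(1280, 720); // assumption: 720p render size

// Hand the video composition to the player item (an AVAssetExportSession's
// videoComposition property is set the same way); `movie` is the
// AVMutableComposition from the question.
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:movie];
playerItem.videoComposition = videoComposition;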
Answer 1 (score: 0)
Figured it out: the problem was that the addMutableTrackWithMediaType: calls were inside the for loop, so every asset got its own fresh pair of composition tracks. Silly me. Fixed as below, and it works like a charm!
I'll leave this here in case anyone else runs into the same problem.
AVMutableComposition *movie = [AVMutableComposition composition];

// Create a single video track and a single audio track up front, then append
// every asset's media to that same pair of tracks.
AVMutableCompositionTrack *compositionVideoTrack = [movie addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *compositionAudioTrack = [movie addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

CMTime offset = kCMTimeZero;
for (AVURLAsset *asset in assets) {
    AVAssetTrack *assetVideoTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *assetAudioTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    CMTimeRange timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

    NSError *error = nil;
    if (![compositionVideoTrack insertTimeRange:timeRange ofTrack:assetVideoTrack atTime:offset error:&error]) {
        NSLog(@"Error adding video track - %@", error);
    }
    if (![compositionAudioTrack insertTimeRange:timeRange ofTrack:assetAudioTrack atTime:offset error:&error]) {
        NSLog(@"Error adding audio track - %@", error);
    }
    offset = CMTimeAdd(offset, asset.duration);
}
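And for completeness, a quick sketch of playing the finished composition back; the AVPlayer setup below is my own addition, not part of the original answer:

// Play the stitched composition directly.
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:movie];
AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
[player play];

Because every segment now lives on the same pair of composition tracks, straight sequential playback needs no AVVideoComposition at all.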