我正在尝试在播放期间更改视频合成的布局(即其组件帧的变换)。看起来有时它会起作用,并且视频合成无缝地改变为新的变换集,但有时它只是冻结并保持当前的变换。 AVPlayer实例上没有状态代码更改,播放器或播放器项目也没有错误。
以前有人经历过这个吗?关于为什么会发生这种情况或如何绕过它的任何建议都将不胜感激。
下面显示了一些代码。重要的一点是'playerItem.videoComposition = videoComposition',这是在点击视频时触发的(出于测试目的)。
此问题的另一个解决方案是在不同的图层上显示视频,但视频必须同步,因此构图似乎是实现此目的的唯一方法。
@implementation VideoView
{
    CGSize _videoSize;                               // Natural size of video A; also the composition render size.
    CMTimeRange _videoFullRange;                     // [0, duration of video A]; both videos assumed equal length.
    AVMutableCompositionTrack * _compositionTrackVideoA;
    AVMutableCompositionTrack * _compositionTrackVideoB;
}

// Static KVO context so our observation can be distinguished from any
// superclass observation of the same key path.
static void * PlayerStatusContext = &PlayerStatusContext;

// Back this view with an AVPlayerLayer so the player renders directly into it.
+ (Class)layerClass
{
    return [AVPlayerLayer class];
}

/// Builds a two-track composition (videos A and B side by side, audio from A),
/// starts playback immediately, and installs a tap recognizer that swaps the
/// layout of track B on each tap.
- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if ( self )
    {
        NSString * videoAPath = [[NSBundle mainBundle] pathForResource:@"cam09v2" ofType:@"mp4"];
        NSString * videoBPath = [[NSBundle mainBundle] pathForResource:@"cam10v2_b" ofType:@"mp4"];
        AVURLAsset * videoAAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoAPath] options:nil];
        AVURLAsset * videoBAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:videoBPath] options:nil];
        AVAssetTrack * videoATrack = [[videoAAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
        AVAssetTrack * videoBTrack = [[videoBAsset tracksWithMediaType:AVMediaTypeVideo] lastObject];
        AVAssetTrack * audioTrack = [[videoAAsset tracksWithMediaType:AVMediaTypeAudio] lastObject];

        _videoSize = [videoATrack naturalSize];
        CMTime videoDuration = videoAAsset.duration;
        // NOTE(review): the full range is taken from asset A; assumes B is at
        // least as long — confirm if the assets can differ in duration.
        _videoFullRange = CMTimeRangeMake(kCMTimeZero, videoDuration);

        AVMutableComposition *composition = [AVMutableComposition composition];
        AVMutableCompositionTrack * compositionTrackVideoA = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack * compositionTrackVideoB = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack * compositionTrackAudio = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        compositionTrackVideoA.preferredTransform = videoATrack.preferredTransform;

        // insertTimeRange: returns NO on failure; check the return value, not
        // the error pointer.
        NSError * error = nil;
        if ( ! [compositionTrackVideoA insertTimeRange:_videoFullRange ofTrack:videoATrack atTime:kCMTimeZero error:&error] )
            NSLog(@"%@", error);
        if ( ! [compositionTrackVideoB insertTimeRange:_videoFullRange ofTrack:videoBTrack atTime:kCMTimeZero error:&error] )
            NSLog(@"%@", error);
        if ( ! [compositionTrackAudio insertTimeRange:_videoFullRange ofTrack:audioTrack atTime:kCMTimeZero error:&error] )
            NSLog(@"%@", error);

        // Kept from the original: snapshots of the tracks are stored so the
        // layer instructions can reference their track IDs later.
        _compositionTrackVideoA = [compositionTrackVideoA copy];
        _compositionTrackVideoB = [compositionTrackVideoB copy];

        AVPlayerItem * playerItem = [AVPlayerItem playerItemWithAsset:composition];
        AVPlayer * player = [AVPlayer playerWithPlayerItem:playerItem];
        [(AVPlayerLayer *)self.layer setPlayer:player];
        [player play];

        // Use a static context (balanced in -dealloc) instead of a 0 literal
        // so unrelated observations are not swallowed.
        [player addObserver:self forKeyPath:@"status" options:0 context:PlayerStatusContext];

        [self updateCompositionForPlayerItem:playerItem];

        UITapGestureRecognizer * tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(didTap:)];
        [self addGestureRecognizer:tapGesture];
    }
    return self;
}

- (void)dealloc
{
    // Balance the addObserver: in -initWithFrame:. Leaving a registered KVO
    // observer behind crashes if the player outlives this view.
    [((AVPlayerLayer *)self.layer).player removeObserver:self forKeyPath:@"status" context:PlayerStatusContext];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    if ( context == PlayerStatusContext )
    {
        // AVPlayerStatus is NSInteger-backed; %ld + (long) cast is correct on
        // both 32- and 64-bit (the original %d truncates on 64-bit).
        NSLog(@"STATUS %ld", (long)((AVPlayer *)object).status);
    }
    else
    {
        // Forward observations we did not register for.
        [super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
    }
}

/// Rebuilds the video composition: track A scaled to the top-left quadrant,
/// track B scaled and translated to the bottom row. Each call toggles B
/// between the bottom-right and bottom-left positions, then assigns the new
/// composition to the player item (the operation under test in the question).
- (void)updateCompositionForPlayerItem:(AVTPlayerItemFix *)playerItem __attribute__((unavailable));
- (void)updateCompositionForPlayerItem:(AVPlayerItem *)playerItem
{
    AVMutableVideoComposition * videoComposition = [AVMutableVideoComposition videoComposition];
    AVMutableVideoCompositionInstruction *videoInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    videoInstruction.enablePostProcessing = NO;
    videoInstruction.timeRange = _videoFullRange;

    // Track A: half-size in the top-left corner.
    AVMutableVideoCompositionLayerInstruction * layerInstructionA = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:_compositionTrackVideoA];
    CGAffineTransform transformA = CGAffineTransformMakeScale(0.5, 0.5);
    [layerInstructionA setTransform:transformA atTime:kCMTimeZero];

    // Track B: half-size on the bottom row; the static counter alternates its
    // horizontal position on successive calls (tap-to-toggle for testing).
    AVMutableVideoCompositionLayerInstruction * layerInstructionB = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:_compositionTrackVideoB];
    CGAffineTransform transformB = CGAffineTransformMakeScale(0.5, 0.5);
    static int i = 0;
    transformB = CGAffineTransformTranslate(transformB, (i++ % 2 == 0) ? _videoSize.width : 0, _videoSize.height);
    [layerInstructionB setTransform:transformB atTime:kCMTimeZero];

    videoInstruction.layerInstructions = [NSArray arrayWithObjects:layerInstructionA, layerInstructionB, nil];
    videoComposition.instructions = [NSArray arrayWithObject:videoInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    videoComposition.renderSize = _videoSize;

    // Swapping the composition mid-playback is the operation that sometimes
    // freezes — see the answer below for the replace-item workaround.
    playerItem.videoComposition = videoComposition;
}

- (void)didTap:(UITapGestureRecognizer *)tapGesture
{
    [self updateCompositionForPlayerItem:((AVPlayerLayer *)self.layer).player.currentItem];
}
@end
答案 0(得分:1)
您可以先保存想要更改布局的时间点,然后用一个带有新视频合成(video composition)的新播放器项目替换当前的播放器项目,并让播放器从停止播放的那个时间点用新的播放器项目重新开始播放。