I am trying to add images to a video using the following code:
// Build a composition containing the source asset's video and audio tracks.
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[self assetURL] options:nil];
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                    ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                     atTime:kCMTimeZero
                      error:nil];

AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                     preferredTrackID:kCMPersistentTrackID_Invalid];
[audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                    ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                     atTime:kCMTimeZero
                      error:nil];
// One video composition instruction spanning the full duration, applying the
// source track's preferred transform.
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

AVMutableVideoCompositionLayerInstruction *videolayerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
mainInstruction.layerInstructions = @[videolayerInstruction];

// Render at the track's natural size with its preferred transform applied.
AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
CGSize naturalSize = CGSizeApplyAffineTransform(videoTrack.naturalSize, videoTrack.preferredTransform);
mainCompositionInst.renderSize = naturalSize;
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30);
// Parent layer hosting the video layer plus one overlay layer per image view.
// `ratio` maps on-screen view coordinates to video pixel coordinates.
CGFloat ratio = naturalSize.width / rectVideo.size.width;

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
videoLayer.frame = CGRectMake(0, 0, naturalSize.width, naturalSize.height);
[parentLayer addSublayer:videoLayer];

for (DraggableImageView *iv in imageViews) {
    CALayer *overlayLayer = [CALayer layer];
    overlayLayer.contents = (id)[[self normalizedImage:iv.image] CGImage];
    // Convert the view's frame from UIKit coordinates (origin top-left) to
    // Core Animation coordinates (origin bottom-left), scaled to video size.
    overlayLayer.frame = CGRectMake(iv.frame.origin.x * ratio,
                                    (rectVideo.size.height - iv.frame.origin.y - iv.frame.size.height) * ratio,
                                    iv.frame.size.width * ratio,
                                    iv.frame.size.height * ratio);
    overlayLayer.masksToBounds = YES;
    overlayLayer.opacity = 0;

    // Clamp the overlay's visible range to the length of the video.
    CGFloat duration = CMTimeGetSeconds(iv.endTime) - CMTimeGetSeconds(iv.startTime);
    CGFloat start = CMTimeGetSeconds(iv.startTime);
    if (duration > CMTimeGetSeconds(videoAsset.duration)) {
        duration = CMTimeGetSeconds(videoAsset.duration);
        start = 0;
    }

    // Fade the overlay in at the start of its range and out at the end.
    CGFloat fadeInSeconds = iv.fadeInSeconds;
    CGFloat fade = fadeInSeconds / duration;
    CAKeyframeAnimation *anim = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
    anim.values = @[@0, @1, @1, @0];
    anim.keyTimes = @[@0, @(fade), @(1.0 - fade), @1];
    anim.duration = duration;
    anim.beginTime = AVCoreAnimationBeginTimeAtZero + start;
    [overlayLayer addAnimation:anim forKey:@"anim"];

    [parentLayer addSublayer:overlayLayer];
}
mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

// Export the composition with the animated overlays applied.
NSURL *exportURL = [self exportUrl];
exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exportSession.outputURL = exportURL;
exportSession.videoComposition = mainCompositionInst;
exportSession.outputFileType = _recordSession.fileType;
exportSession.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

exportProgressTimer = [NSTimer scheduledTimerWithTimeInterval:.1
                                                       target:self
                                                     selector:@selector(exportDidProgress:)
                                                     userInfo:nil
                                                      repeats:YES];

[exportSession exportAsynchronouslyWithCompletionHandler:^{
    switch (exportSession.status) {
        case AVAssetExportSessionStatusCompleted:
        case AVAssetExportSessionStatusFailed:
            [self savingComplete:exportSession.error];
            break;
        case AVAssetExportSessionStatusCancelled:
            break;
        default:
            break;
    }
}];
This code works fine on an iPhone SE, but on an iPad Air 2 it produces this color artifact along the right edge of the video (zoomed-in screenshot):
The glitch is not constant; it varies vertically (both its size and its y position change). I don't think it is a problem with the video dimensions (it is well known that a green line can appear at the edge when the dimensions are not divisible by 4 or 16), since the video is 1080x1920.
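Just so I can rule that out anyway, here is a minimal sketch of how I could snap the render size to a multiple of 16 before assigning it; the RenderSizeSnappedTo16 helper below is only an illustration, not something in my project:

// Illustration only: snap a render size to multiples of 16 so the encoder
// never has to pad the frame. fabs() guards against the negative components
// CGSizeApplyAffineTransform can return when the transform contains a rotation.
static CGSize RenderSizeSnappedTo16(CGSize size) {
    CGFloat width = 16.0 * floor(fabs(size.width) / 16.0);
    CGFloat height = 16.0 * floor(fabs(size.height) / 16.0);
    return CGSizeMake(width, height);
}

// Possible usage, replacing the direct assignment above:
// mainCompositionInst.renderSize = RenderSizeSnappedTo16(naturalSize);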
Any ideas?