我像下面这样用 GPUImage 创建视频效果:
self.overlayerView = [[GPUImageView alloc] init];
self.overlayerView.frame = self.view.frame;
dispatch_queue_t queue = dispatch_queue_create("queue", NULL);
dispatch_async(queue, ^{
NSURL *sourceURL = [[NSBundle mainBundle] URLForResource:@"212121" withExtension:@"mp4"];
GPUImageMovie *sourceMovie = [[GPUImageMovie alloc] initWithURL:sourceURL];
sourceMovie.playAtActualSpeed = YES;
sourceMovie.shouldRepeat = YES;
sourceMovie.shouldIgnoreUpdatesToThisTarget = YES;
NSURL *maskURL = [[NSBundle mainBundle] URLForResource:@"rose" withExtension:@"mp4"];
GPUImageMovie *maskMovie = [[GPUImageMovie alloc] initWithURL:maskURL];
maskMovie.playAtActualSpeed = YES;
maskMovie.shouldRepeat = YES;
NSURL *alphaURL = [[NSBundle mainBundle] URLForResource:@"rose_alpha" withExtension:@"mp4"];
GPUImageMovie *alphaMovie = [[GPUImageMovie alloc] initWithURL:alphaURL];
alphaMovie.playAtActualSpeed = YES;
alphaMovie.shouldRepeat = YES;
NSURL *topURL = [[NSBundle mainBundle] URLForResource:@"screen" withExtension:@"mp4"];
GPUImageMovie *topMovie = [[GPUImageMovie alloc] initWithURL:topURL];
topMovie.playAtActualSpeed = YES;
topMovie.shouldRepeat = YES;
filter0 = [[GPUImageThreeInputFilter alloc] initWithFragmentShaderFromString:@"precision highp float;uniform sampler2D inputImageTexture;uniform sampler2D inputImageTexture2;uniform sampler2D inputImageTexture3;varying vec2 textureCoordinate;void main(){vec4 video=texture2D(inputImageTexture,textureCoordinate);vec4 mv=texture2D(inputImageTexture2, textureCoordinate);vec4 alpha = texture2D(inputImageTexture3, textureCoordinate);gl_FragColor = video * (1.0 - alpha.r) + mv;}"];
filter1 = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:@"\nprecision highp float;\nuniform sampler2D inputImageTexture; //video\nuniform sampler2D inputImageTexture2; //screen\nvarying vec2 textureCoordinate;\nvoid main()\n{\nvec4 video = texture2D(inputImageTexture, textureCoordinate);\nvec4 screen = texture2D(inputImageTexture2, textureCoordinate);\nmediump vec4 whiteColor = vec4(1.0);\ngl_FragColor = whiteColor - ((whiteColor - screen) * (whiteColor - video));\n}"];
[sourceMovie addTarget:filter0];
[maskMovie addTarget:filter0];
[alphaMovie addTarget:filter0];
[filter0 addTarget:filter1];
[topMovie addTarget:filter1];
[sourceMovie startProcessing];
[alphaMovie startProcessing];
[maskMovie startProcessing];
[topMovie startProcessing];
[filter0 forceProcessingAtSize:CGSizeMake(480,480)];
[filter1 forceProcessingAtSize:CGSizeMake(480,480)];
dispatch_async(dispatch_get_main_queue(), ^{
[filter1 addTarget:self.overlayerView];
});
});
输出的视频带有黑色背景——是因为 alphaMovie 与 maskMovie 没有同步播放吗?
(原始的效果视频本身是没有黑色背景的。)
问题:
1. 我该如何去掉这个黑色背景?
2. 为什么合成后的视频会出现黑色背景?
答案 1(得分:2):
GPUImage 框架并不支持这样的 Alpha 通道功能。它提供了绿幕(绿屏抠像)功能,因此如果你事先在绿幕前录制视频,就可以把前景从绿幕背景中分离出来。但你这里描述的做法——一个 Alpha 通道视频加上第二个视频——无法正常工作,因为你同时从两个不同的视频源拉取帧,而这两个源不会保持同步。需要注意的是,即使使用绿幕功能,在边缘的精确处理上也会出现问题,正如 this blog post(包含源代码)中所描述的那样。根本问题在于:对于接近绿色但并非纯绿色的边缘像素,滤镜的阈值过渡(filter ramp)会以异常的方式处理它们。你可以考虑的另一种方法是:在用 iOS 的视频播放逻辑播放之前,先离线把这 N 路画面预先合成为一个视频。