I'm using GPUImage to display some overlays on top of the video I record. Right now I use a GPUImageAlphaBlendFilter to show an image over my video, but I need to display this image with a "pop" animation: the image should grow from very small up to its normal size. My problem is that I don't know how to do this. Is there a way to animate the overlay while recording video? Below is the code I have so far:
-(void)setUpCameraWithPosition:(bool)switchToFrontCamera
{
    if (videoCamera != nil) {
        [videoCamera stopCameraCapture];
    }

    if (switchToFrontCamera) {
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionFront];
    }
    else {
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
    }

    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    videoCamera.horizontallyMirrorFrontFacingCamera = NO;
    videoCamera.horizontallyMirrorRearFacingCamera = NO;

    filter = [GPUImageBrightnessFilter new];
    filter.brightness = 0.0;

    // Transform filter that positions the overlay before blending.
    GPUImageTransformFilter *transitionFilter = [[GPUImageTransformFilter alloc] init];
    CGAffineTransform trans = CGAffineTransformTranslate(CGAffineTransformIdentity, 0.0, 0.5);
    [transitionFilter setAffineTransform:trans];

    GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
    blendFilter.mix = 1.0;

    UIImage *img = [UIImage imageNamed:@"logo-background-medium-cropped.png"];
    iv = [[UIImageView alloc] initWithImage:img];
    iv.backgroundColor = [UIColor clearColor];

    [videoCamera addTarget:filter];

    GPUImageView *filterView = (GPUImageView *)self.preview;
    filterView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;

    NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
    NSLog(@"%@", pathToMovie);
    unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie

    NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
    movieWriter.encodingLiveVideo = YES;
    movieWriter.shouldPassthroughAudio = YES;

    // Blend the camera feed (texture 0) with the transformed UI element (texture 1).
    uiElementInput = [[GPUImageUIElement alloc] initWithView:iv];
    [filter addTarget:blendFilter atTextureLocation:0];
    [uiElementInput addTarget:transitionFilter];
    [transitionFilter addTarget:blendFilter atTextureLocation:1];
    [blendFilter addTarget:filterView];
    [blendFilter addTarget:movieWriter];

    [videoCamera startCameraCapture];

    // Re-render the UI element every time the camera produces a frame.
    __unsafe_unretained GPUImageUIElement *weakUIElementInput = uiElementInput;
    [filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime frameTime) {
        [weakUIElementInput update];
    }];
}
-(IBAction)stopCameraTouchUp:(id)sender
{
    if (recording)
    {
        saving = YES;
        recording = NO;
        dispatch_async(dispatch_get_main_queue(), ^(void) {
            // Note: movieWriter was added as a target of blendFilter, not filter,
            // so this removeTarget is a no-op; finishRecording below still stops the writer.
            [filter removeTarget:movieWriter];
            videoCamera.audioEncodingTarget = nil;
            [movieWriter finishRecording];
            NSLog(@"Movie completed");

            NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
            NSLog(@"%@", path);

            // Copy the finished movie into the camera roll.
            ALAssetsLibrary *al = [[ALAssetsLibrary alloc] init];
            [al writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:path] completionBlock:^(NSURL *assetURL, NSError *error) {
                if (error) {
                    NSLog(@"Error %@", error);
                } else {
                    NSLog(@"Success");
                    recordButton.hidden = NO;
                    [videoCamera stopCameraCapture];
                    [self.navigationController popViewControllerAnimated:YES];
                }
            }];
        });
    }
    else if (!saving)
    {
        recording = NO;
        [self.navigationController popViewControllerAnimated:YES];
    }
}
- (IBAction)startRecordingTouchUp:(id)sender
{
    __unsafe_unretained GPUImageUIElement *weakUIElementInput = uiElementInput;
    [filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime frameTime) {
        [weakUIElementInput update];
    }];

    dispatch_async(dispatch_get_main_queue(), ^(void) {
        NSLog(@"Start recording");
        recording = YES;
        [movieWriter startRecording];
    });
}
Answer 0 (score: 2)
If I understand your question correctly, you want to write an animated image into the video. I found a possible solution in a GPUImage pull request:
https://github.com/BradLarson/GPUImage/pull/1843 changes GPUImagePicture.m and adds the ability to render an image at a specific frame time. The author also included an example of how to animate an image this way:
+ (void)zoomInAndWriteToMovie:(UIImage*)image
{
    NSString* moviePath = @"output.mp4";
    NSString* documentPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSURL* movieURL = [NSURL URLWithString:moviePath relativeToURL:[NSURL fileURLWithPath:documentPath isDirectory:YES]];

    GPUImageMovieWriter *movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(1280, 720)];
    GPUImagePicture *imageSource = [[GPUImagePicture alloc] initWithImage:image];
    GPUImageTransformFilter *transformFilter = [[GPUImageTransformFilter alloc] init];

    [imageSource addTarget:transformFilter];
    [transformFilter addTarget:movieWriter];
    [movieWriter startRecording];

    const int totalFrames = 100;
    const float startZoomRatio = 1.2f;
    const float endZoomRatio = 1.0f;

    for (int i = 0; i < totalFrames; i++) {
        float currentZoomRatio = startZoomRatio + (endZoomRatio - startZoomRatio) * i / (float)totalFrames;
        CGAffineTransform scaleTransform = CGAffineTransformMakeScale(currentZoomRatio, currentZoomRatio);
        transformFilter.affineTransform = scaleTransform;
        imageSource.imageFrameTime = CMTimeMake(i, 30); // 30 FPS

        dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
        [imageSource processImageWithCompletionHandler:^{
            dispatch_semaphore_signal(semaphore);
        }];
        dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
    }

    [[GPUImageContext sharedFramebufferCache] purgeAllUnassignedFramebuffers];
    [transformFilter removeTarget:movieWriter];

    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
    [movieWriter finishRecordingWithCompletionHandler:^{
        dispatch_semaphore_signal(semaphore); // Force to wait for the completion, it might be causing bugs to continue without waiting.
    }];
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);

    [transformFilter removeAllTargets];
    [imageSource removeAllTargets];
}
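
For your live-recording case, the same transform-based idea can be driven from the frame-processing callback you already have, without the pull request: update the GPUImageTransformFilter's scale on every camera frame. Here is a minimal, untested sketch that would replace the frameProcessingCompletionBlock in your startRecordingTouchUp:. It assumes transitionFilter is promoted from a local variable to an instance variable so the block can reach it; the 0.5-second duration and the 10%-to-100% scale range are arbitrary values chosen for illustration:

// Sketch: grow the overlay from 10% to full size over the first half second
// of recording, timed by the camera's frame clock.
__block CMTime animationStart = kCMTimeInvalid;
__unsafe_unretained GPUImageUIElement *weakUIElementInput = uiElementInput;
__unsafe_unretained GPUImageTransformFilter *weakTransform = transitionFilter;
[filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime frameTime) {
    if (CMTIME_IS_INVALID(animationStart)) {
        animationStart = frameTime; // first frame starts the animation clock
    }
    Float64 elapsed = CMTimeGetSeconds(CMTimeSubtract(frameTime, animationStart));
    CGFloat progress = MIN(elapsed / 0.5, 1.0); // 0 -> 1 over 0.5 s, then clamped
    CGFloat scale = 0.1 + 0.9 * progress;       // overlay size: 10% -> 100%
    // Keep the original downward translation; animate only the scale.
    CGAffineTransform t = CGAffineTransformScale(CGAffineTransformMakeTranslation(0.0, 0.5), scale, scale);
    [weakTransform setAffineTransform:t];
    [weakUIElementInput update];
}];

Because frameTime comes from the camera, the animation should stay in sync with the recorded video regardless of the actual capture frame rate, and since the blend filter already feeds both the preview and the movie writer, the pop shows up in both.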