我制作的应用需要在视频中添加过滤器;
我使用GPUImageMovieWriter
和GPUImageMovie
在mp4上添加过滤器,效果很好。
但我想在一个视频上添加多个过滤器;
我尝试GPUImageFilterGroup
,但我不知道如何制作它。
我的意思是同时添加亮度过滤器和对比度过滤器。我尝试先添加一个过滤器,处理完成后再添加另一个过滤器,但这需要很多时间,我的应用程序无法等待它。
我使用此代码添加过滤器:
/// Re-encodes a bundled MP4 through a two-filter GPUImage chain
/// (sepia -> vignette) and writes the result to disk.
///
/// Fix for the reported crash
/// ("Input buffer must be in an uncompressed format when outputSettings is not nil"):
/// when audio samples are forwarded straight from the reader they are still
/// compressed (AAC), so the writer's audio input must be configured for
/// passthrough BEFORE it becomes the audio encoding target. Setting
/// `shouldPassthroughAudio = YES` makes GPUImageMovieWriter create its audio
/// AVAssetWriterInput with nil outputSettings, which is exactly what
/// appendSampleBuffer: requires for compressed buffers.
///
/// NOTE(review): `movieFile` and `movieWriter` are locals; under ARC they are
/// released when this method returns, which can abort processing mid-stream.
/// They should be promoted to strong ivars/properties on the owning object.
- (void)inputTwoFilters {
    // --- Input: bundled source movie ---
    NSString *originalVideoPath = [[NSBundle mainBundle] pathForResource:@"1" ofType:@"mp4"];
    NSURL *originalVideoURL = [NSURL fileURLWithPath:originalVideoPath];
    GPUImageMovie *movieFile = [[GPUImageMovie alloc] initWithURL:originalVideoURL];

    // --- Output: remove any stale file so AVAssetWriter can create it ---
    NSString *pathToMovie = @"/Users/hanlan/Desktop/t/test14.mp4";
    unlink([pathToMovie UTF8String]);

    // --- Determine the natural size of the source video track ---
    AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:originalVideoURL options:nil];
    AVAssetTrack *videoTrack = [[anAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (!videoTrack) {
        NSLog(@"No video track found in %@", originalVideoURL);
        return;
    }
    CGSize size = videoTrack.naturalSize;

    // --- Build the filter group: sepia -> vignette ---
    GPUImageFilterGroup *filterGroup = [[GPUImageFilterGroup alloc] init];
    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
    [filterGroup addFilter:sepiaFilter];
    GPUImageVignetteFilter *vignetteFilter = [[GPUImageVignetteFilter alloc] init];
    [filterGroup addFilter:vignetteFilter];
    // Internal wiring: frames enter at sepiaFilter and leave at vignetteFilter.
    [sepiaFilter addTarget:vignetteFilter];
    [filterGroup setInitialFilters:[NSArray arrayWithObject:sepiaFilter]];
    [filterGroup setTerminalFilter:vignetteFilter];
    [filterGroup forceProcessingAtSizeRespectingAspectRatio:size];

    // --- Writer setup ---
    GPUImageMovieWriter *movieWriter =
        [[GPUImageMovieWriter alloc] initWithMovieURL:[NSURL fileURLWithPath:pathToMovie]
                                                 size:size];
    [filterGroup addTarget:movieWriter];
    [movieFile addTarget:filterGroup];
    movieFile.runBenchmark = YES;

    // Pass compressed audio straight through; MUST be set before the writer is
    // used as the audio encoding target, otherwise the writer configures its
    // audio input with outputSettings and crashes on compressed buffers.
    movieWriter.shouldPassthroughAudio = YES;
    movieFile.audioEncodingTarget = movieWriter;
    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

    // Install the completion handler BEFORE processing starts so a fast encode
    // cannot finish without it; finishRecording finalizes the output file.
    __weak GPUImageMovieWriter *weakWriter = movieWriter;
    [movieWriter setCompletionBlock:^{
        [weakWriter finishRecording];
        NSLog(@"finished!!!");
    }];

    [movieWriter startRecording];
    [movieFile startProcessing];
}
但它崩溃了,错误日志如下:
2015-08-22 12:30:33.792 ttt[3002:974193] *** Terminating app due to uncaught exception 'NSInvalidArgumentException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] Input buffer must be in an uncompressed format when outputSettings is not nil'
ORZ ......
答案 0(得分:1)
要从iOS设备的相机中过滤实时视频,您可以使用以下代码:
// Capture 640x480 video from the rear camera, run each frame through a
// custom fragment shader, and render the filtered output into an on-screen
// GPUImageView.
GPUImageVideoCamera *camera =
    [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                        cameraPosition:AVCaptureDevicePositionBack];
camera.outputImageOrientation = UIInterfaceOrientationPortrait;

GPUImageFilter *shaderFilter =
    [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"CustomShader"];

GPUImageView *previewView =
    [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, viewWidth, viewHeight)];
// Add previewView to the view hierarchy somewhere so it's visible.

[camera addTarget:shaderFilter];
[shaderFilter addTarget:previewView];
[camera startCameraCapture];
More related to GPUImage video filtering:
/// Applies the filter selected by a tapped thumbnail button (identified by
/// sender.tag) to the current video: tears down any in-flight GPUImage
/// processing, chooses source/destination file URLs, re-encodes the movie
/// through the chosen filter chain, and starts playback of the result.
/// Tag ranges (per the branches below): tag 0 = original/no filter,
/// tags < 22 = filters, tags 22-27 = toolbox effects.
/// NOTE(review): depends heavily on instance state declared elsewhere
/// (movieFile, movieWriter, filterGroup, moviePlayer, isOriginal, ...).
- (void) applyFilterForVideo : (UIButton *) sender
{
// NOTE(review): %d with what appears to be an NSInteger tag is wrong on
// 64-bit; should be %ld with a (long) cast.
NSLog(@"%d,%d",intLastSelFilterTag,sender.tag);
// User cannot use Locked Filters
if (sender.tag < 22) // Apply condition Just for Filters
{
NSLog(@"selected Tag - %@ ",[mutArrLockFilter objectAtIndex:sender.tag]);
// "1" in mutArrLockFilter marks this filter as locked: show the
// unlock/paywall flow instead of applying it.
if([[mutArrLockFilter objectAtIndex:sender.tag] isEqualToString:@"1"])
{
[self shouldReturnWithWalsWithTag:sender.tag];
return;
}
}
if (!isToolboxFilter && !isOriginal)
{
// Re-tapping the already-active filter just resumes playback
// (video or audio track, depending on isAudioSelected) instead of
// re-processing.
if (intLastSelFilterTag == sender.tag)
{
if(moviePlayer.rate == 0.0)
{
if (isAudioSelected)
{
[self playAudio:nil];
}
else
{
[self playMovie];
}
}
return;
}
else
{
// A different filter was chosen: remember it and pause playback
// before tearing the pipeline down.
intLastSelFilterTag = sender.tag;
[moviePlayer pause];
}
}
// --- Tear down any previous GPUImage processing pipeline ---
// Delegates are nilled first so callbacks from the dying objects are
// ignored, then targets are detached and recording is cancelled.
movieFile.delegate = nil;
movieWriter.delegate = nil;
[self cancelVideoProcessing];
[movieFile removeAllTargets];
[filterGroup removeTarget:movieWriter];
[movieWriter cancelRecording];
movieFile = nil;
movieWriter = nil;
[Crittercism leaveBreadcrumb:@"filterVideoItem - Filter"];
btnPlayVideo.hidden = YES;
btnPlayVideo.tag = sender.tag;
btnLastFilterForVideo = sender;
__weak UIButton *weakbtnPlay = btnPlayVideo;
isImageEdited = YES;
currentFilterIndex = sender.tag;
// Tag 0 means "no filter": restore the original video and stop here.
if (sender.tag == 0)
{
[self normalVideoFilter];
NSLog(@"select normal");
return;
}
// --- Choose source and destination files ---
// Filters (< 22) and tools (>= 22) write to different intermediate
// files; isOriginal decides whether we start from the untouched movie
// or from the other stage's output. unlink() removes a stale output so
// the writer can create it fresh.
if (sender.tag < 22)
{
isFilterApplied = YES;
if (isOriginal)
{
originalVideoURL = [NSURL fileURLWithPath:originalMoviePath];
unlink([filterMoviePath UTF8String]);
newFilterVideoURL = [NSURL fileURLWithPath:filterMoviePath];
}
else
{
originalVideoURL = [NSURL fileURLWithPath:toolMoviePath];
unlink([finalMoviePath UTF8String]);
newFilterVideoURL = [NSURL fileURLWithPath:finalMoviePath];
}
}
else
{
isToolApplied = YES;
if (isOriginal)
{
originalVideoURL = [NSURL fileURLWithPath:originalMoviePath];
unlink([toolMoviePath UTF8String]);
newFilterVideoURL = [NSURL fileURLWithPath:toolMoviePath];
}
else
{
originalVideoURL = [NSURL fileURLWithPath:filterMoviePath];
unlink([finalMoviePath UTF8String]);
newFilterVideoURL = [NSURL fileURLWithPath:finalMoviePath];
}
}
// Small delay (50 ms) before rebuilding, presumably to let the previous
// pipeline's cancellation settle — TODO confirm.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.05f * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
movieFile = [[GPUImageMovie alloc] initWithURL:originalVideoURL];
movieFile.playAtActualSpeed = YES;
NSLog(@"movieFile is %@",movieFile);
// Read the source video's natural size to compute the aspect ratio.
// NOTE(review): objectAtIndex:0 throws if the asset has no video track.
AVAsset *anAsset = [[AVURLAsset alloc] initWithURL:originalVideoURL options:nil];
CGSize size = [[[anAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];
NSLog(@"width - %f,%f",size.width,size.height);
@try
{
UIButton *button = (UIButton *)sender;
currentFilterIndex = button.tag;
// --- Build the filter (or filter group) for the selected tag ---
// NOTE(review): filterGroup is assigned instances of many unrelated
// filter classes below, so it is presumably typed
// GPUImageOutput<GPUImageInput> * rather than GPUImageFilterGroup *.
if (sender.tag < 12 && sender.tag != 2)
{
// Simple image-style filters are configured by a shared helper.
filterGroup = [[GPUImageFilterGroup alloc] init];
[self setFilterOnImage:toolImage fromTool:NO isOriginal:isOriginal];
}
else if(sender.tag == 12) // Influencia - Highway
{
filterType = GPUIMAGE_SOFTELEGANCE;
filterGroup = [[GPUImageSoftEleganceFilter alloc] init];
}
else if(sender.tag == 13) // Dark Side
{
// Slider range/default only reset when switching to this filter.
[filterSettingsSlider setMinimumValue:0.0];
[filterSettingsSlider setMaximumValue:1.0];
if(intLastSelFilterTag != sender.tag)
{
[filterSettingsSlider setValue:0.50];
}
filterType = GPUIMAGE_SOBELEDGEDETECTION;
filterGroup = [[GPUImageSobelEdgeDetectionFilter alloc] init];
}
else if(sender.tag == 17) // Blue Velvet
{
filterType = GPUIMAGE_AMATORKA;
filterGroup = [[GPUImageAmatorkaFilter alloc] init];
}
else if(sender.tag == 2) // Lost Memories
{
filterType = GPUIMAGE_GRAYSCALE;
filterGroup = [[GPUImageGrayscaleFilter alloc] init];
}
else if(sender.tag == 20) // Miss etikate
{
filterType = GPUIMAGE_MISSETIKATE;
filterGroup = [[GPUImageMissEtikateFilter alloc] init];
}
else if(sender.tag == 14) // Carousel
{
// Hue filter: slider spans the full 0-360 degree hue circle.
[filterSettingsSlider setMinimumValue:0.0];
[filterSettingsSlider setMaximumValue:360.0];
if(intLastSelFilterTag != sender.tag)
{
[filterSettingsSlider setValue:180.0];
}
filterType = GPUIMAGE_HUE;
filterGroup = [[GPUImageHueFilter alloc] init];
}
else if(sender.tag == 16) // Scatter
{
if(intLastSelFilterTag != sender.tag)
{
[filterSettingsSlider setValue:0.15];
}
[filterSettingsSlider setMinimumValue:0.0];
[filterSettingsSlider setMaximumValue:0.3];
filterType = GPUIMAGE_POLKADOT;
filterGroup = [[GPUImagePolkaDotFilter alloc] init];
if ([filterGroup isKindOfClass:[GPUImagePolkaDotFilter class]])
{
[(GPUImagePolkaDotFilter *)filterGroup setDotScaling:3.0];
}
}
else if(sender.tag == 19) // Comic Strip
{
[filterSettingsSlider setMinimumValue:0.0];
[filterSettingsSlider setMaximumValue:1.0];
if(intLastSelFilterTag != sender.tag)
{
[filterSettingsSlider setValue:0.5];
}
filterType = GPUIMAGE_SKETCH;
filterGroup = [[GPUImageSketchFilter alloc] init];
}
else if(sender.tag == 18) // Alien
{
filterType = GPUIMAGE_COLORINVERT;
filterGroup = [[GPUImageColorInvertFilter alloc] init];
}
else if(sender.tag == 15) // New 11 - Mist and Shadow
{
// Composite filter group: sepia feeds into vignette
// (sepia is the group's entry, vignette its terminal output).
filterGroup = [[GPUImageFilterGroup alloc] init];
GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
[(GPUImageFilterGroup *)filterGroup addFilter:sepiaFilter];
GPUImageVignetteFilter *VeFilter = [[GPUImageVignetteFilter alloc] init];
[(GPUImageFilterGroup *)filterGroup addFilter:VeFilter];
[sepiaFilter addTarget:VeFilter];
[(GPUImageFilterGroup *)filterGroup setInitialFilters:[NSArray arrayWithObject:sepiaFilter]];
[(GPUImageFilterGroup *)filterGroup setTerminalFilter:VeFilter];
}
else if(sender.tag == 21) // New 13 - Superstition
{
// Composite: amatorka -> blue monochrome -> brightness(+0.3);
// filter2 (amatorka) is the entry, filter3 the terminal output.
filterGroup = [[GPUImageFilterGroup alloc] init];
GPUImageMonochromeFilter *filter1 = [[GPUImageMonochromeFilter alloc] init];
[(GPUImageMonochromeFilter *)filter1 setColor:(GPUVector4){0.0f, 0.0f, 1.0f, 1.f}];
[(GPUImageFilterGroup *)filterGroup addFilter:filter1];
GPUImageAmatorkaFilter *filter2 = [[GPUImageAmatorkaFilter alloc] init];
[(GPUImageFilterGroup *)filterGroup addFilter:filter2];
GPUImageBrightnessFilter *filter3 = [[GPUImageBrightnessFilter alloc] init];
[(GPUImageBrightnessFilter *)filter3 setBrightness:0.3];
[(GPUImageFilterGroup *)filterGroup addFilter:filter3];
[filter2 addTarget:filter1];
[filter1 addTarget:filter3];
[(GPUImageFilterGroup *)filterGroup setInitialFilters:[NSArray arrayWithObject:filter2]];
[(GPUImageFilterGroup *)filterGroup setTerminalFilter:filter3];
}
else
{
// Toolbox effects (tags 22-27) are configured by the shared
// helper with fromTool:YES.
if (sender.tag >= 22 && sender.tag <= 27)
{
filterGroup = [[GPUImageFilterGroup alloc] init];
[self setFilterOnImage:toolImage
fromTool:YES
isOriginal:isOriginal];
}
}
NSLog(@"The frame is %@",NSStringFromCGRect(imageView.frame));
NSLog(@"Aspect Ratio is %f",size.width/size.height);
float aspectRatio = (size.width/size.height);
// Dead branch kept deliberately disabled (if (0)): an alternative
// "fill screen bounds" layout for imageView.
if (0) // Fill with the screen bounds.
{
imageView.frame = CGRectMake(imageView.frame.origin.x, imageView.frame.origin.y, ScreenWidth, ScreenWidth);
if (aspectRatio > 1.0)
{
imageView.frame = CGRectMake(imageView.frame.origin.x, imageView.frame.origin.y, imageView.frame.size.width * aspectRatio, imageView.frame.size.height);
}
else
{
imageView.frame = CGRectMake(imageView.frame.origin.x, imageView.frame.origin.y, imageView.frame.size.width, imageView.frame.size.height/aspectRatio);
}
}
//imageView.frame = CGRectMake(imageView.frame.origin.x,imageView.frame.origin.y, 320, imageView.frame.size.width / aspectRatio);
// Computed but only logged below; the writer uses a square size.
float countedWidth = ScreenWidth;
float countedHeight = 0.0f;
countedHeight = (countedWidth/aspectRatio);
NSLog(@"Counted Width is %f and Height is %f",countedWidth,countedHeight);
// --- Writer setup: re-encode to a square ScreenWidth canvas ---
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:newFilterVideoURL
size:CGSizeMake(ScreenWidth, ScreenWidth)];//
// Videos picked from the photo library arrive rotated; compensate
// on the filter's first input.
if ([self.strFilterSelMedia isEqualToString:kVideo] &&
APP_DELEGATE.isVideoFromPhotoLibrary)
{
[filterGroup setInputRotation:kGPUImageRotateRight atIndex:0];
}
//[movieWriter setInputRotation:kGPUImageRotateRight atIndex:0];
/* also try this
processedImage = [UIImage imageWithCGImage:[processedImage CGImage] scale:1.0 orientation:originalImage.imageOrientation];
*/
NSLog(@"1 - %s",__PRETTY_FUNCTION__);
// Configure this for video from the movie file, where we want to preserve all video frames and audio samples
//Pankaj audio support change
// Passthrough audio avoids re-encoding (and the compressed-buffer
// crash when the writer's audio input has outputSettings).
movieWriter.shouldPassthroughAudio = YES;
movieWriter.encodingLiveVideo = NO;
movieFile.audioEncodingTarget = movieWriter;
__weak GPUImageMovieWriter *weakMoviWriter = movieWriter;
//set any resolution you want
//[filterGroup forceProcessingAtSize:CGSizeMake(320.0f, 320.0f)];
[filterGroup forceProcessingAtSizeRespectingAspectRatio:CGSizeMake(ScreenWidth, ScreenWidth)];
// Wire the pipeline: movieFile -> filterGroup -> {imageView, movieWriter}
// (live preview and file output from the same filtered stream).
[movieFile addTarget:filterGroup];
[filterGroup addTarget:imageView];
[filterGroup addTarget:movieWriter];
movieFile.runBenchmark = YES;
[movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];
[movieWriter startRecording];
[movieFile startProcessing];
__weak GPUImageOutput<GPUImageInput> *weakGPUImageFilter = filterGroup;
//[[UIApplication sharedApplication] beginIgnoringInteractionEvents];
isPlayAgain = FALSE;
urlLastToPlay = newFilterVideoURL;
APP_DELEGATE.urlProcessedVideo = newFilterVideoURL;
isPlayAudioInBackground = YES;
// Capture current state for the completion block below.
// NOTE(review): __block is unnecessary here under ARC since none of
// these are reassigned inside the blocks — __weak or plain capture
// would do; verify no MRC target still builds this file.
__block NSURL *thelastURL = urlLastToPlay;
__block AVPlayer *theMoviePlayer = moviePlayer;
//__block UIView *theView = viewContainer;
__block GPUImageView *theImageView = imageView;
__block NSString *typeOfMedia = self.strFilterSelMedia;
imageView.fillMode = kGPUImageFillModePreserveAspectRatio;
NSLog(@"ImageView.size is %@",NSStringFromCGSize(imageView.frame.size));
// Start playback immediately while encoding continues in parallel.
if (isAudioSelected)
{
// NOTE : 0.05 delay to remove gap between sound and video.
[self performSelector:@selector(playAudio:)
withObject:nil
afterDelay:0.05f];
}
else
{
[self playMovie];
}
//[viewContainer bringSubviewToFront:imageView];
// When encoding finishes: finalize the file, swap the player over to
// the newly written movie, and (after the remaining playback time
// elapses) hide the live-preview GPUImageView and stop audio.
[movieWriter setCompletionBlock:^{
[weakGPUImageFilter removeTarget:weakMoviWriter];
[weakMoviWriter finishRecording];
NSLog(@"Finish movieWriterSoftElegance");
dispatch_async(dispatch_get_main_queue(),
^{
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:thelastURL options:nil];
AVPlayerItem *playerItem = [[AVPlayerItem alloc] initWithAsset:asset];
float currentDuration = CMTimeGetSeconds(theMoviePlayer.currentTime);
[theMoviePlayer replaceCurrentItemWithPlayerItem:playerItem];
[theMoviePlayer seekToTime:kCMTimeZero];
//[theView sendSubviewToBack:theImageView];
// Wait out the rest of the (audio) playback before hiding the
// preview — except for plain video, which switches immediately.
float timeToDispatch = (CMTimeGetSeconds(asset.duration) - currentDuration);
if ([typeOfMedia isEqualToString:kVideo])
{
timeToDispatch = 0.0f;
}
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeToDispatch * NSEC_PER_SEC)), dispatch_get_main_queue(),
^{
[theImageView setHidden:YES];
[theMoviePlayer pause];
if ([audioPlayer isPlaying])
{
[audioPlayer stop];
[timer invalidate];
timer = nil;
}
[theMoviePlayer seekToTime:kCMTimeZero
completionHandler:^(BOOL finished)
{
}];
});
});
// NOTE(review): the low-priority queue hop adds nothing — the inner
// block immediately bounces back to main; a single
// dispatch_async(main) would suffice.
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
dispatch_async(dispatch_get_main_queue(), ^{
//[[UIApplication sharedApplication] endIgnoringInteractionEvents];
weakbtnPlay.hidden = NO;
});
});
}];
// isPlayAgain = FALSE;
// urlLastToPlay = newFilterVideoURL;
// APP_DELEGATE.urlProcessedVideo = newFilterVideoURL;
}
@catch (NSException *exception)
{
// NOTE(review): @try/@catch around normal Cocoa calls is an
// anti-pattern under ARC (not exception-safe); prefer validating
// inputs (e.g. empty track array) instead.
NSLog(@"%@",exception);
}
});
}