How can I apply dynamic visual effects to a playing video in iOS?

Time: 2014-04-08 07:50:27

Tags: objective-c video ios7 xcode5 gpuimage

I want to change the visual effect on a playing video dynamically. I am using the GPUImage framework to apply the effects, and I downloaded the sample project from Here. From GPUImage I chose the SimpleVideoFileFilter example, which runs a single filter; I modified the code so that it now supports 10 filters. My problem is this: while the video file is playing in the GPUImageView, I select another filter. The effect changes right away, but the video starts over from the beginning. I want to change the filter on the currently playing video without restarting it. My code is:

#pragma mark - Play Video with Effects

- (void)getVideo:(NSURL *)url
{
    movieFile = [[GPUImageMovie alloc] initWithURL:url];

    movieFile.runBenchmark = YES;
    movieFile.playAtActualSpeed = YES;
    //    filter = [[GPUImagePixellateFilter alloc] init];

    [movieFile addTarget:filter];

    // Only rotate the video for display, leave orientation the same for recording
    filterView = (GPUImageView *)self.view;
    [filter addTarget:filterView];

    // In addition to displaying to the screen, write out a processed version of the movie to disk
    NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
    unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
    NSURL *movieURL1 = [NSURL fileURLWithPath:pathToMovie];

    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL1 size:CGSizeMake(640.0, 480.0)];
    [filter addTarget:movieWriter];

    // Configure this for video from the movie file, where we want to preserve all video frames and audio samples
    movieWriter.shouldPassthroughAudio = YES;
    movieFile.audioEncodingTarget = movieWriter;
    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

    [movieWriter startRecording];
    [movieFile startProcessing];

    [movieWriter setCompletionBlock:^{
        [filter removeTarget:movieWriter];
        [movieWriter finishRecording];
        UISaveVideoAtPathToSavedPhotosAlbum(pathToMovie, nil, nil, nil);
    }];
}

- (void)event:(UIButton*)sender
{
    [filter removeTarget:filterView];
    UIButton *selectedBtn = sender;
    [movieFile removeTarget:filter];
    switch (selectedBtn.tag)
    {
        case 0:
            filter = [[GPUImageBrightnessFilter alloc] init];
            break;
        case 1:
            filter = [[GPUImageGrayscaleFilter alloc] init];
            break;
        case 2:
            filter = [[GPUImageSketchFilter alloc] init];
            break;
        case 3:
            filter = [[GPUImageToonFilter alloc] init];
            break;
        case 4:
            filter = [[GPUImageMonochromeFilter alloc] init];
            break;
        case 5:
            filter = [[GPUImagePixellateFilter alloc] init];
            break;
        case 6:
            filter = [[GPUImageCrosshatchFilter alloc] init];
            break;
        case 7:
            filter = [[GPUImageVignetteFilter alloc] init];
            break;
        case 8:
            filter = [[GPUImageColorInvertFilter alloc] init];
            break;
        case 9:
            filter = [[GPUImageLevelsFilter alloc] init];
            [(GPUImageLevelsFilter *)filter setRedMin:1.0 gamma:1.0 max:0.0 minOut:0.5 maxOut:0.5];
            break;

        default:
            break;        
    }
    [self getVideo:movieURL];
}
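
Roughly, what I am trying to achieve is an in-place swap like the sketch below (not working code, just to illustrate the intent; it assumes the same movieFile/filter/filterView/movieWriter ivars as above and that GPUImage tolerates retargeting a running GPUImageMovie):

- (void)switchToFilter:(GPUImageOutput<GPUImageInput> *)newFilter
{
    // Unhook the old filter from the running chain
    [movieFile removeTarget:filter];
    [filter removeAllTargets];

    // Wire the new filter into the same chain without touching movieFile,
    // so playback would continue from its current position
    filter = newFilter;
    [movieFile addTarget:filter];
    [filter addTarget:filterView];
    [filter addTarget:movieWriter];
}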

Please help me resolve this issue.

2 Answers:

Answer 0 (score: 0)

I found the answer myself. The solution is:

- (void)event:(UIButton*)sender
{
    //  isMoviePlayCompleted = NO;
    if (btnTag != sender.tag)
    {
        btnTag = (int)sender.tag;
        NSLog(@"tag:%d",btnTag);
        [self applyFilter:sender.tag];
    }

}

Applying the filter:

- (void)applyFilter:(NSInteger)tag
{
    [[NSFileManager defaultManager] removeItemAtURL:saveTempUrl error:nil];
    recording = NO;
    switch (tag)
    {
        case 0:
            filter = nil;
            filter = [[GPUImagePixellateFilter alloc] init];
            [(GPUImagePixellateFilter *)filter setFractionalWidthOfAPixel:0.0];
            break;
        case 1:
            filter = nil;
            filter = [[GPUImageGrayscaleFilter alloc] init];
            break;
        case 2:
            filter = nil;
            filter = [[GPUImageSketchFilter alloc] init];
            break;
        case 3:
            filter = nil;
            filter = [[GPUImageToonFilter alloc] init];
            break;
        case 4:
            filter = nil;
            filter = [[GPUImageMonochromeFilter alloc] init];
            break;
        case 5:
            filter = nil;
            filter = [[GPUImageVignetteFilter alloc] init];
            break;
        default:
            break;
    }

    [self getVideo:movieURL];
}

Playing the video with effects:

- (void)getVideo:(NSURL *)url
{
    // Tear down any previous pipeline before building a new one
    [filter removeAllTargets];
    movieFile.audioEncodingTarget = nil;
    [movieWriter cancelRecording];
    [movieFile cancelProcessing];
    [movieWriter finishRecording];
    movieWriter = nil;
    movieFile = nil;
    filterView = nil;

    recording = YES;
    anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    movieFile = [[GPUImageMovie alloc] initWithURL:url];
    movieFile.delegate = self;
    movieFile.runBenchmark = NO;
    movieFile.playAtActualSpeed = YES;

    [movieFile addTarget:filter];

    // Only rotate the video for display, leave orientation the same for recording
    filterView = (GPUImageView *)self.view;
    [filter addTarget:filterView];

    // In addition to displaying to the screen, write out a processed version of the movie to disk
    NSString *pathName = [NSString stringWithFormat:@"Doc.MOV"];
    NSString *pathToMovie = [NSTemporaryDirectory() stringByAppendingPathComponent:pathName];
    NSFileManager *fileTmp = [[NSFileManager alloc] init];
    if ([fileTmp fileExistsAtPath:pathToMovie]) {
        [fileTmp removeItemAtPath:pathToMovie error:nil];
    }
    unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie

    saveTempUrl = [NSURL fileURLWithPath:pathToMovie];
    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:saveTempUrl size:size];
    [filter addTarget:movieWriter];
    [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];

    [movieWriter startRecording];
    [movieFile startProcessing];

    __unsafe_unretained typeof(self) weakSelf = self;
    [weakSelf->movieWriter setCompletionBlock:^{
        NSLog(@"write completed");
        [filter removeTarget:movieWriter];
        [movieWriter finishRecording];
        movieWriter = nil;
        movieFile = nil;
        filterView = nil;
        recording = NO;
        if (saveFilter)
        {
            saveFilter = NO;
            UISaveVideoAtPathToSavedPhotosAlbum([saveTempUrl path], self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
            shareFilter = YES;
        }
    }];
}

That's it. Now, whenever I select a filter, the whole pipeline is created fresh, so the memory issue is solved. It now works in my application.
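
As a purely optional refactor of the code above, the teardown at the top of getVideo: can be pulled into its own helper so the rebuild logic is easier to read:

- (void)tearDownCurrentPipeline
{
    // Same cleanup that getVideo: performs before rebuilding the chain
    [filter removeAllTargets];
    movieFile.audioEncodingTarget = nil;
    [movieWriter cancelRecording];
    [movieFile cancelProcessing];
    [movieWriter finishRecording];
    movieWriter = nil;
    movieFile = nil;
    filterView = nil;
}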

Answer 1 (score: 0)

// Use this code

[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(movieFinished)
                                             name:MPMoviePlayerPlaybackDidFinishNotification
                                           object:videoPlayer];
[videoPlayer play];

// Restart playback when the movie reaches the end
- (void)movieFinished
{
    [videoPlayer play];
}

- (void)playTheVideo:(NSURL *)videoURL
{
    // Remember the current position so playback can resume from the same point
    NSTimeInterval time = videoPlayer.currentPlaybackTime;

    UIView *parentView = imageViewFiltered; // adjust as needed
    CGRect bounds = parentView.bounds;      // get bounds of parent view
    CGRect subviewFrame = CGRectInset(bounds, 0, 0);
    videoPlayer.view.frame = subviewFrame;
    videoPlayer.view.autoresizingMask = (UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight);
    [parentView addSubview:videoPlayer.view];

    // Load the (newly filtered) video and resume from the saved time
    videoPlayer.contentURL = videoURL;
    [videoPlayer setCurrentPlaybackTime:time];
    [videoPlayer stop];
    NSLog(@"Videoplayer stop or play in this view");
    [videoPlayer play];

    self.showLoading = NO;
}
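
The idea is to remember currentPlaybackTime, point the player at the newly filtered file, and resume from that position. A hypothetical way to wire this up from a filter button (renderVideoWithFilterTag: is a placeholder helper, not part of this answer):

- (void)filterButtonTapped:(UIButton *)sender
{
    // Hypothetical helper that renders the clip with the selected filter
    NSURL *filteredURL = [self renderVideoWithFilterTag:sender.tag];
    [self playTheVideo:filteredURL]; // resumes from the saved playback time
}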