GPUImageFramebuffer framebuffer overrelease error

Asked: 2015-04-15 19:39:01

Tags: ios objective-c gpuimage

I built this based on the sample code in FilterShowcase -> UI element, and I keep hitting a crash. The goal of the code is to overlay a stopwatch or countdown timer on top of the video while it is being recorded.
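
For context, the pipeline I am following from the FilterShowcase UI element sample boils down to roughly this (a simplified sketch of the full code posted below):

    [videoCamera addTarget:filter];            // GPUImageVideoCamera -> brightness filter
    [filter addTarget:blendFilter];            // camera branch into the alpha blend
    [uiElementInput addTarget:blendFilter];    // GPUImageUIElement (timer labels) into the blend
    [blendFilter addTarget:filterView];        // live preview
    [blendFilter addTarget:movieWriter];       // recording via GPUImageMovieWriter
    [filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime frameTime) {
        [uiElementInput update];               // re-render the overlay for every processed frame
    }];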

Here is the exception:

*** Assertion failure in -[GPUImageFramebuffer unlock], /Users/itjunkii/Dropbox/Workspace-iOS/PocketWOD/Pods/GPUImage/framework/Source/GPUImageFramebuffer.m:269

2015-04-15 12:06:16.063 RSSReader[5416:730515] *** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: 'Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?'

*** First throw call stack:

(0x1852b42d8 0x196a800e4 0x1852b4198 0x186168ed4 0x100546220 0x100584cc0 0x100540510 0x100585550 0x10053df4c 0x100540558 0x10058c9b4 0x10058d2f0 0x10058deb0 0x101418fd4 0x101418f94 0x101423db8 0x10141c2c4 0x1014265d4 0x101428248 0x1972b122c 0x1972b0ef0)

libc++abi.dylib: terminating with uncaught exception of type NSException

Here is the code:

- (void)editVideo:(BOOL)isFront
{
    NSString *preset = AVCaptureSessionPreset1280x720;

    float presetW = 720;
    float presetH = 1280;

    NSString *device = [UIDevice currentDevice].platformString;
    //NSLog(@"device = %@", device);

    if ([device isEqualToString:@"iPhone 4S"] ||
        [device isEqualToString:@"iPhone 4"] ||
        [device isEqualToString:@"iPad mini 1G"]) {
        preset = AVCaptureSessionPreset640x480;
        presetW = 480.f;
        presetH = 640.f;
    } else {
        preset = AVCaptureSessionPreset1280x720;
        presetW = 720.f;
        presetH = 1280.f;
    }

    if (!isFront)
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:preset
                                                      cameraPosition:AVCaptureDevicePositionBack];
    else
        videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:preset
                                                          cameraPosition:AVCaptureDevicePositionFront];

    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
    videoCamera.horizontallyMirrorFrontFacingCamera = NO;
    videoCamera.horizontallyMirrorRearFacingCamera = NO;
    //videoCamera.audioEncodingTarget = nil;

    filter = [[GPUImageBrightnessFilter alloc] init];
    [filter forceProcessingAtSizeRespectingAspectRatio:CGSizeMake(presetW, presetH)];

    GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
    blendFilter.mix = 1.0;
    [blendFilter forceProcessingAtSizeRespectingAspectRatio:CGSizeMake(presetW, presetH)];

    UIView *contentView = [[UIView alloc] initWithFrame:CGRectMake(0,0,screenWidth,screenHeight-20)];
    contentView.backgroundColor = [UIColor clearColor];

    CGFloat vWidth = [[UIScreen mainScreen] bounds].size.width;
    CGFloat vHeight = [[UIScreen mainScreen] bounds].size.height;

    UIView *bg = [[UIView alloc] initWithFrame:CGRectMake(0,vHeight-100,vWidth,100)];
    bg.backgroundColor = [UIColor colorWithRed:0 green:0 blue:0 alpha:0.5];
    [contentView addSubview:bg];

    float bgHeight = bg.frame.size.height;
    timeLabel = [[UILabel alloc] initWithFrame:CGRectMake(0,vHeight-(bgHeight+10), vWidth, bgHeight)];
    timeLabel.font = [UIFont fontWithName:@"HelveticaNeue-Bold" size:68.0f];
    timeLabel.text = @"00:00.00";
    timeLabel.textAlignment = NSTextAlignmentCenter;
    timeLabel.textColor = [UIColor redColor];
    timeLabel.tag = 1;
    timeLabel.backgroundColor = [UIColor clearColor];
    [contentView addSubview:timeLabel];

    [contentView addSubview:cDownLabel];

    uiElementInput = [[GPUImageUIElement alloc] initWithView:contentView];

    [filter addTarget:blendFilter];
    [uiElementInput addTarget:blendFilter];

    [movieFile addTarget:filter];

    // Only rotate the video for display, leave orientation the same for recording
    GPUImageView *filterView = (GPUImageView *)vwVideo;
    //[filter addTarget:filterView];
    [blendFilter addTarget:filterView];

    [videoCamera addTarget:filter];

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-retain-cycles"
#pragma clang diagnostic ignored "-Wgnu"
    __block BOOL isFirstRun = NO;
    __unsafe_unretained GPUImageUIElement *weakUIElementInput = uiElementInput;
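    // Per-frame callback: update the timer labels and re-render the UI element overlay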
    [filter setFrameProcessingCompletionBlock:^(GPUImageOutput * filter, CMTime frameTime){

        NSDate *currentDate = [NSDate date];
        NSTimeInterval timeInterval = [currentDate timeIntervalSinceDate:sTime];
        NSDate *timerDate = [NSDate dateWithTimeIntervalSince1970:timeInterval];

        NSDate *futureDate = [sTime dateByAddingTimeInterval:(minutes*60)];

        if (isCountdown) {
            cDownLabel.text = [NSString stringWithFormat:@"%i", countSeconds];
        }

        if (isRecording && !isTimeSet) {
            sTime = [NSDate date];
            isTimeSet = YES;
        }

        float elapsedTime = -[sTime timeIntervalSinceNow];

        if (isRecording && !isAmrap) {
            if (elapsedTime < 3600) {
                [dateFormatter setDateFormat:@"mm:ss.SS"];
            } else {
                [dateFormatter setDateFormat:@"HH:mm:ss.SS"];
                timeLabel.font = [UIFont fontWithName:@"HelveticaNeue-Bold" size:52.0f];
            }
            [dateFormatter setTimeZone:[NSTimeZone timeZoneForSecondsFromGMT:0.0]];
            NSString *timeString=[dateFormatter stringFromDate:timerDate];
            timeLabel.text = timeString;
        } else if (isRecording && isAmrap) {
            NSTimeInterval iv = [futureDate timeIntervalSinceNow];
            int h = iv / 3600;
            int m = (iv - h * 3600) / 60;
            int s = iv - h * 3600 - m * 60;
            if (h <= 0)
                timeLabel.text = [NSString stringWithFormat:@"%02d:%02d", m, s];
            else
                timeLabel.text = [NSString stringWithFormat:@"%02d:%02d:%02d", h, m, s];
            if (h + m + s <= 0 && isFirstRun) {
                [buzzerPlayer play];
                [self stopRecording:nil];
            }
            isFirstRun = YES;
        }
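        // Redraw the UIKit overlay so the blend filter receives the updated label text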
        [weakUIElementInput update];
    }];

    NSDate *currentDate = [NSDate date];
    NSDateFormatter *dFormatter = [[NSDateFormatter alloc] init];
    [dFormatter setDateFormat:@"mmddyy"];
    [dFormatter setTimeZone:[NSTimeZone timeZoneForSecondsFromGMT:0.0]];
    fileNameString=[dFormatter stringFromDate:currentDate];

    pathToMovie = [NSHomeDirectory()
                   stringByAppendingPathComponent:[NSString stringWithFormat:@"Documents/%@.mov", fileNameString]];

    //NSLog(@"path = %@", pathToMovie);

    unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
    movieURL = [NSURL fileURLWithPath:pathToMovie];

    NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264,AVVideoCodecKey,
                              [NSDictionary dictionaryWithObjectsAndKeys:
                               [NSNumber numberWithInt:2000000],AVVideoAverageBitRateKey,
                               AVVideoProfileLevelH264High40,AVVideoProfileLevelKey,
                               [NSNumber numberWithInt:30],AVVideoMaxKeyFrameIntervalKey,nil],
                              AVVideoCompressionPropertiesKey,
                              [NSNumber numberWithFloat:presetW], AVVideoWidthKey,
                              [NSNumber numberWithFloat:presetH], AVVideoHeightKey, nil];

    //movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(720.0, 1280.0)];
    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL
                                                           size:CGSizeMake(presetW, presetH)
                                                       fileType:AVFileTypeQuickTimeMovie
                                                 outputSettings:settings];

    movieWriter.encodingLiveVideo = YES;
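    // Record the blended camera + overlay output, and route camera audio straight to the writer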
    [blendFilter addTarget:movieWriter];
    videoCamera.audioEncodingTarget = movieWriter;

    [videoCamera startCameraCapture];
}

Any ideas where I went wrong?

0 Answers:

No answers yet.