(GPU Memory Crashes) Applying an effect to an array of images

Time: 2015-10-15 09:25:49

Tags: ios objective-c

Instruments reports leaks. This is the code I'm profiling when I run Instruments for leaks:

-(void)imageToVideoConversion:(NSMutableArray *)arrayOfImages
{

    NSLog(@"(((((((((((((((((((())))))))))))))))))))) no of images:%lu",(unsigned long)[arrayOfImages count]);

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];

    NSString *videoOutputPath =  [documentsDirectory stringByAppendingPathComponent:
                        [NSString stringWithFormat:@"ImageVideo-%d.mov",arc4random() % 1000]];



    NSError *error = nil;
    NSUInteger fps = 30;


    imageSize = CGSizeMake(640, 640);

    NSLog(@"Start building video from defined frames.");

    videoWriter = [[AVAssetWriter alloc] initWithURL:
                   [NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie
                                               error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];
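
    // The width/height above define the output dimensions; pixel buffers
    // appended below are scaled to fit them if the sizes differ.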

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoSettings];


    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
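    // NOTE: expectsMediaDataInRealTime = YES is intended for live sources such
    // as the camera; for offline frame-by-frame writing like this, NO is
    // usually the more appropriate setting.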
    [videoWriter addInput:videoWriterInput];

    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];


    int frameCount = 0;
    double numberOfSecondsPerFrame = 3.0;   // each image is shown for 3 seconds

    NSLog(@"Seconds per frame: %f", numberOfSecondsPerFrame);

    double frameDuration = fps * numberOfSecondsPerFrame;
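    // With fps = 30 and 3 seconds per frame, frameDuration = 90 ticks on the
    // 1/30-second timescale used below, so frame N is presented at N * 3 seconds.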



    // Iteration 0 renders the title card; iterations 1...count render the images,
    // so the loop intentionally runs [arrayOfImages count] + 1 times.
    for (int i = 0; i <= (int)[arrayOfImages count]; i++)
    {


        UIImage *originalImage = nil;
        NSString *nameImage = nil;
        UILabel *title = nil;
        UIColor *cur_color;



        if (i == 0)
        {
            NSUInteger titleIndex = [[SingletonClass sharedManager] titleValue];
            nameImage = [NSString stringWithFormat:@"TitleImage_%lu.png", (unsigned long)(titleIndex + 1)];
            // Divide by 255.0 (not 255) so the color components are not
            // truncated by integer division.
            cur_color = [UIColor colorWithRed:rgbValues[titleIndex].redValue / 255.0
                                        green:rgbValues[titleIndex].greenValue / 255.0
                                         blue:rgbValues[titleIndex].bluevalue / 255.0
                                        alpha:1.0];

            UIImageView *snapshotView = [[UIImageView alloc]initWithFrame:CGRectMake(0, 0, 640.0, 640.0)];
            snapshotView.image = [UIImage imageNamed:nameImage];



            title = [[UILabel alloc]initWithFrame:CGRectMake(0, 0, 640.0, 100.0)];
            title.center = CGPointMake(snapshotView.frame.size.width/2, snapshotView.frame.size.height/2);
            title.text = self.titleText.text;
            title.textColor = cur_color;
            title.textAlignment = NSTextAlignmentCenter;
            title.font = [UIFont fontWithName:[NSString stringWithFormat:@"%@", font_styles[titleIndex].fontStyle]
                                         size:75];
            title.backgroundColor = [UIColor clearColor];
            [snapshotView addSubview:title];



            UIGraphicsBeginImageContext(snapshotView.frame.size);
            [snapshotView.layer renderInContext:UIGraphicsGetCurrentContext()];
            originalImage = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
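            // UIGraphicsBeginImageContext renders at a scale of 1.0, so the
            // snapshot is exactly 640x640 pixels, matching the video size.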


        }

        else
        {
            /***************** Add Border ***************/


            // Round-trip the source image through JPEG data (quality 0.8) to get
            // a fresh copy before compositing the border onto it.
            NSData *n_dat = UIImageJPEGRepresentation([arrayOfImages objectAtIndex:i-1], 0.8);
            originalImage = [UIImage imageWithData:n_dat];

            UIImage *borderImage = [UIImage imageNamed:@"border.png"];
            CGSize size1 = CGSizeMake(640.0,640.0);
            UIGraphicsBeginImageContext(size1);

            [originalImage drawInRect:CGRectMake(0,0,640.0,640.0)];
            [borderImage drawInRect:CGRectMake(0,0,640.0,640.0)];

            originalImage = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();


            /********************************************/
        }


        UIImage *imageToAddBuffer = nil;





        if (isEffectSelected)
        {
            NSLog(@"Printing the current effect number: %d", currentEffectNumber);

            imageToAddBuffer = [self add_EffectOnImage:originalImage effectNumber:currentEffectNumber];

            // Re-encode the filtered image through PNG data.
            NSData *imgData = UIImagePNGRepresentation(imageToAddBuffer);
            imageToAddBuffer = [UIImage imageWithData:imgData];
        }
        else
        {
            imageToAddBuffer = originalImage;
        }


        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[imageToAddBuffer CGImage]];
        imageToAddBuffer = nil;



        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                CMTime frameTime = CMTimeMake((int64_t)(frameCount * frameDuration), (int32_t)fps);

                NSLog(@"Appending buffer for image index: %d", i);

                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];

                if (!append_ok) {
                    NSError *writerError = videoWriter.error;
                    if (writerError != nil) {
                        NSLog(@"Unresolved error %@, %@.", writerError, [writerError userInfo]);
                    }
                }
            }
            else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }

        // Release the pixel buffer exactly once, after the append loop, whether
        // or not the append succeeded. (Releasing it inside the loop risks
        // appending an already-released buffer on a retry.)
        if (buffer != NULL) {
            CVPixelBufferRelease(buffer);
        }

        if (!append_ok) {
            printf("error appending image %d after %d attempts\n", frameCount, j);
        }
        frameCount++;


    }


    //Finish the session:
    [videoWriterInput markAsFinished];

    [videoWriter finishWritingWithCompletionHandler:^(){

        NSLog(@"Finished writing video.");

        [self MergeAndSave:videoOutputPath];
    }];

    videoOutputPath = nil;

}

When I generate the video without applying any effect (a color change applied to the images), it works fine. But when I apply the effect and generate the video with 5 images, the app crashes. (If I select only 4 images and generate the video with the effect, it generates the video without any crash.)
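
One pattern that often helps with per-image memory growth of this kind (a sketch only, assuming the memory that accumulates comes from autoreleased temporaries such as the UIImageJPEGRepresentation/UIImagePNGRepresentation data and the UIGraphics results) is to wrap each loop iteration in its own @autoreleasepool, so temporaries are drained every frame instead of only when the method returns:

    for (int i = 0; i <= (int)[arrayOfImages count]; i++)
    {
        @autoreleasepool {
            // ... build originalImage and imageToAddBuffer exactly as above ...
            // Autoreleased temporaries (JPEG/PNG NSData, images returned by
            // UIGraphicsGetImageFromCurrentImageContext, etc.) are released
            // here at the end of every iteration instead of piling up.
        }
    }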

When I run Instruments for leaks, it shows red lines in the GPUImagePicture.m and GPUImageOutput.m files. If anyone knows what's going on, please help me. Thanks in advance.
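
For the GPUImage side, here is a minimal sketch of what add_EffectOnImage: might need (this assumes it builds a GPUImagePicture -> filter chain; filterForEffectNumber: is a hypothetical helper, not from the original code): capture the output, then explicitly tear the chain down so the shared framebuffer cache can reclaim its GPU memory:

    - (UIImage *)add_EffectOnImage:(UIImage *)source effectNumber:(int)effectNumber
    {
        // Hypothetical reconstruction; only the GPUImage calls shown are real API.
        GPUImagePicture *picture = [[GPUImagePicture alloc] initWithImage:source];
        GPUImageFilter *filter = [self filterForEffectNumber:effectNumber]; // hypothetical helper

        [picture addTarget:filter];
        [filter useNextFrameForImageCapture];   // required before reading the output image
        [picture processImage];
        UIImage *result = [filter imageFromCurrentFramebuffer];

        // Tear the chain down and purge unassigned framebuffers so GPU memory
        // does not accumulate across images.
        [picture removeAllTargets];
        [[GPUImageContext sharedFramebufferCache] purgeAllUnassignedFramebuffers];

        return result;
    }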

0 Answers:

No answers