How to fix a leaking CVPixelBuffer

Date: 2011-12-27 14:00:16

Tags: iphone ios core-video

Please tell me where this code leaks...

// Here I make a video from images in the Documents directory

- (void) testCompressionSession:(NSString *)path
{
if ([[NSFileManager defaultManager] fileExistsAtPath:path]) {
    [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}
NSArray *array = [dictInfo objectForKey:@"sortedKeys"];

NSString *betaCompressionDirectory = path;
NSError *error = nil;

unlink([betaCompressionDirectory UTF8String]);

NSLog(@"array = %@",array);
NSData *imgDataTmp = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]];
NSLog(@"link : %@",[projectPath stringByAppendingPathComponent:[array objectAtIndex:0]]);
CGSize size = CGSizeMake([UIImage imageWithData:imgDataTmp].size.width, [UIImage imageWithData:imgDataTmp].size.height);
//----initialize compression engine
NSLog(@"size : w : %f, h : %f",size.width,size.height);
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:betaCompressionDirectory]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);
if(error)
    NSLog(@"error = %@", [error localizedDescription]);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                       [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                                                 sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);

if ([videoWriter canAddInput:writerInput])
    NSLog(@"I can add this input");
else
    NSLog(@"i can't add this input");

[videoWriter addInput:writerInput];

[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

dispatch_queue_t    dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);

[writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
    //BOOL isEffect = NO;
    int i = 0;
    float totalTime = 0.0f;
    float nextTime = 0;
    if ([writerInput isReadyForMoreMediaData]) {
        while (1)
        {   
            if (i <= [array count] && i > 0) {
                nextTime = [[dictInfo objectForKey:[array objectAtIndex:i-1]] floatValue];
            }
            totalTime += i == 0 ? 0 : nextTime;
            CMTime presentTime=CMTimeMake(totalTime, 1);
            printf("presentTime : %f  ",CMTimeGetSeconds(presentTime));
            if (i >= [array count]) 
            {
                NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i-1]]];
                UIImage* tmpImg = [UIImage imageWithData:imgData];
                tmpImg = [self imageWithImage:tmpImg scaledToSize:size];
                while ( !writerInput.readyForMoreMediaData)
                {
                    sleep(0.01);
                }
                CVPixelBufferRef buffer = NULL;
                buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
                [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
                NSLog(@"%f",totalTime-nextTime+(nextTime/2.0));
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                //CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
                [videoWriter release];
                break;
            } else {
                NSData *imgData = [NSData dataWithContentsOfFile:[projectPath stringByAppendingPathComponent:[array objectAtIndex:i]]];
                UIImage* tmpImg = [UIImage imageWithData:imgData];
                //tmpImg = [self imageWithImage:tmpImg scaledToSize:size];
                //UIImageWriteToSavedPhotosAlbum(tmpImg, nil, nil, nil);
                while (!adaptor.assetWriterInput.readyForMoreMediaData && !writerInput.readyForMoreMediaData)
                {
                    sleep(0.01);
                }
                CVPixelBufferRef buffer = NULL;
                buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
                if (buffer)
                {
                    if(![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime])
                        NSLog(@"FAIL");
                    else
                        NSLog(@"Success:%d",i);
                    CVPixelBufferRelease(buffer);
                }
            }
            i++;
        }
    }
}];
}

// Here I make the CVPixelBufferRef from a CGImageRef

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
                         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef pxbuffer = NULL;

CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (CFDictionaryRef) options, &pxbuffer);

NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);

CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
NSParameterAssert(context);

CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);

CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}

The leak log is:

CVObject         CFRetain  00:37.957.985  2  0x1ecae0  0   CoreVideo  CVPixelBufferPool::createPixelBuffer(__CFAllocator const*, __CFDictionary const*, int*)
Malloc 96 Bytes  Malloc    00:40.015.872  1  0x1f0750  96  CoreVideo  CVBuffer::init()
CVPixelBuffer    Malloc    00:40.969.716  1  0x1f2570  96  CoreVideo  CVObject::alloc(unsigned long, __CFAllocator const*, unsigned long, unsigned long)

1 Answer:

Answer 0 (score: 7):

Look here:

CVPixelBufferRef buffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
CVPixelBufferLockBaseAddress(buffer, 0);
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];

First you create a pixel buffer and store its address in the buffer variable, then you overwrite that same variable with the result of pixelBufferFromCGImage, so the buffer created from the pool can never be released.
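
A minimal sketch of a fix for that version (assuming the rest of the loop stays as in the question): create the buffer exactly once and release it after appending. Since pixelBufferFromCGImage calls CVPixelBufferCreate, it hands back a +1 reference that the caller owns.

CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
if (buffer)
{
    [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
    CVPixelBufferRelease(buffer); // balances the CVPixelBufferCreate inside pixelBufferFromCGImage
}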

EDIT

You have just edited out the code my answer was based on, so it no longer applies.

Now, in this part:

CVPixelBufferRef buffer = NULL;
buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
[adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
NSLog(@"%f",totalTime-nextTime+(nextTime/2.0));
...

You have a commented-out CVPixelBufferPoolRelease(adaptor.pixelBufferPool), which is fine, since in this version you do not use a pixel buffer pool, but what I am missing here is a call to CVPixelBufferRelease(buffer).
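
A sketch of the corrected final-frame branch (assuming everything else stays as in the question) releases the buffer right after appending it, exactly as the other branch already does:

CVPixelBufferRef buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[tmpImg CGImage] size:size];
if (buffer)
{
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(totalTime-nextTime+(nextTime/2.0), 1)];
    CVPixelBufferRelease(buffer); // the missing release; pixelBufferFromCGImage returns a +1 buffer
}
[writerInput markAsFinished];
[videoWriter finishWriting];
[videoWriter release];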