Memory issue when reading video frames on iPhone

Time: 2012-03-27 10:14:48

Tags: iphone ios video frame avurlasset

I'm running into memory problems while reading video frames from an existing video picked from the iPhone library. At first I added the UIImage frames themselves to an array, but I figured that array quickly became too large for memory, so instead I now save each UIImage to the documents folder and add only the image path to the array. However, I still get the same memory warning, even though checking allocations with Instruments shows the total allocated memory never exceeding 2.5 MB, and no leaks are found either... Can anyone think of something?

-(void)addFrame:(UIImage *)image
{
    NSString *imgPath = [NSString stringWithFormat:@"%@/Analysis%d-%d.png", docFolder, currentIndex, framesArray.count];       
    [UIImagePNGRepresentation(image) writeToFile:imgPath atomically:YES];
    [framesArray addObject:imgPath];    
    frameCount++;      
}

-(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    [picker dismissModalViewControllerAnimated:YES];
    [framesArray removeAllObjects];    
    frameCount = 0;          

    // incoming video
    NSURL *videoURL = [info valueForKey:UIImagePickerControllerMediaURL];
    //NSLog(@"Video : %@", videoURL);

    // AVURLAsset to read input movie (i.e. mov recorded to local storage)
    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:videoURL options:inputOptions];     

    // Load the input asset tracks information
    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{        

        NSError *error = nil;
        nrFrames = CMTimeGetSeconds([inputAsset duration]) * 30;
        NSLog(@"Total frames = %d", nrFrames);

        // Check status of "tracks", make sure they were loaded    
        AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
        if (tracksStatus != AVKeyValueStatusLoaded)
            // failed to load
            return;        

        /* Read video samples from input asset video track */
        AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:inputAsset error:&error];

        NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
        [outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]  forKey: (NSString*)kCVPixelBufferPixelFormatTypeKey];
        AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[inputAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];


        // Assign the tracks to the reader and start to read
        [reader addOutput:readerVideoTrackOutput];
        if ([reader startReading] == NO) {
            // Handle error
            NSLog(@"Error reading");
        }

        NSAutoreleasePool *pool = [NSAutoreleasePool new];
        while (reader.status == AVAssetReaderStatusReading)
        {            
            if(!memoryProblem)
            {
                CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
                if (sampleBufferRef) 
                {
                    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
                    /*Lock the image buffer*/
                    CVPixelBufferLockBaseAddress(imageBuffer,0); 
                    /*Get information about the image*/
                    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer); 
                    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
                    size_t width = CVPixelBufferGetWidth(imageBuffer); 
                    size_t height = CVPixelBufferGetHeight(imageBuffer); 

                    /*Create a CGImageRef from the CVImageBufferRef*/
                    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 
                    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
                    CGImageRef newImage = CGBitmapContextCreateImage(newContext); 

                    /*We release some components*/
                    CGContextRelease(newContext); 
                    CGColorSpaceRelease(colorSpace);

                    /*We unlock the image buffer only after the bitmap context is done with baseAddress*/
                    CVPixelBufferUnlockBaseAddress(imageBuffer,0);

                    UIImage *image= [UIImage imageWithCGImage:newImage scale:[UIScreen mainScreen].scale orientation:UIImageOrientationRight];          
                    //[self addFrame:image];
                    [self performSelectorOnMainThread:@selector(addFrame:) withObject:image waitUntilDone:YES];

                    /*We release the CGImageRef*/
                    CGImageRelease(newImage);                    

                    CMSampleBufferInvalidate(sampleBufferRef);
                    CFRelease(sampleBufferRef);
                    sampleBufferRef = NULL;
                }
            }
            else 
            {                
                break;
            }            
        }
        [pool release];

        NSLog(@"Finished");        
    }];   
}

1 Answer:

Answer 0: (score: 2)

Try one change:

Move the NSAutoreleasePool inside the while loop and drain it there, once per iteration.

So it would look like this:

while (reader.status == AVAssetReaderStatusReading)
{            
    NSAutoreleasePool *pool = [NSAutoreleasePool new];

    .....

    [pool drain];
}
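
For reference, here is a minimal sketch of the whole reading loop rearranged this way, in the same pre-ARC style as the question. The method name readFramesWithReader:output: is made up for illustration only; reader, readerVideoTrackOutput, memoryProblem and addFrame: are assumed to be set up exactly as in the question's code.

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

- (void)readFramesWithReader:(AVAssetReader *)reader
                      output:(AVAssetReaderTrackOutput *)readerVideoTrackOutput
{
    while (reader.status == AVAssetReaderStatusReading && !memoryProblem)
    {
        // One pool per frame: every object autoreleased while converting this
        // frame (the UIImage, the PNG NSData created inside addFrame:, ...)
        // is released before the next frame is decoded.
        NSAutoreleasePool *pool = [NSAutoreleasePool new];

        CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
        if (sampleBufferRef)
        {
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBufferRef);
            CVPixelBufferLockBaseAddress(imageBuffer, 0);

            uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
            size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
            size_t width = CVPixelBufferGetWidth(imageBuffer);
            size_t height = CVPixelBufferGetHeight(imageBuffer);

            // Draw the BGRA pixel data into a bitmap context and snapshot it.
            CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
            CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                         colorSpace,
                                                         kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
            CGImageRef cgImage = CGBitmapContextCreateImage(context);
            CGContextRelease(context);
            CGColorSpaceRelease(colorSpace);
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

            UIImage *image = [UIImage imageWithCGImage:cgImage
                                                 scale:[UIScreen mainScreen].scale
                                           orientation:UIImageOrientationRight];
            [self performSelectorOnMainThread:@selector(addFrame:)
                                   withObject:image
                                waitUntilDone:YES];

            CGImageRelease(cgImage);
            CMSampleBufferInvalidate(sampleBufferRef);
            CFRelease(sampleBufferRef);
        }

        [pool drain];   // frees this frame's temporaries before the next iteration
    }
}

With ARC, the same effect comes from wrapping the loop body in an @autoreleasepool { ... } block. Either way, the autoreleased temporaries created for each frame are freed every iteration instead of piling up until the reader finishes, which is the usual cause of memory warnings without any reported leaks in this kind of loop.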