My iPhone app is using a huge amount of memory

Date: 2014-02-07 04:16:25

Tags: ios iphone opencv memory-leaks out-of-memory

I built a photo gallery app using UICollectionView, but as the allocations below show, its memory usage is very poor.

[Allocations screenshot]

I can't find what is wrong with it. Where should I explicitly release objects? Please tell me.

I suspect the following code.

In the collection view:

- (UICollectionViewCell *)collectionView:(UICollectionView *)collectionView cellForItemAtIndexPath:(NSIndexPath *)indexPath
...
    dispatch_async(all_queue, ^{
        ALAssetRepresentation *representation = [asset defaultRepresentation];
        UIImage *image = [UIImage imageWithCGImage:[representation fullResolutionImage]
                                             scale:[representation scale]
                                       orientation:(UIImageOrientation)[representation orientation]];
        NSString *filename = [representation filename];
        NSLog(@"%@", filename);
        NSLog(@"Loaded Image row : %ld", (long)indexPath.row);

        vector<cv::Rect> faces = [ImageUtils findFeature:image minsize:MIN_FACE_SIZE
                                             withCascade:face_cascade];

        Mat imageMat = [ImageUtils cvMatFromUIImage:image];
        for (unsigned int i = 0; i < faces.size(); ++i) {
            rectangle(imageMat, cv::Point(faces[i].x, faces[i].y),
                      cv::Point(faces[i].x + faces[i].width, faces[i].y + faces[i].height),
                      cv::Scalar(0, 255, 255), 5);
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            [faceImage setImage:[ImageUtils UIImageFromCVMat:imageMat]];
            [cell setNeedsDisplay];
        });
    });
    return cell;
}

The method being called:

+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;

    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels (color channels + alpha)

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,                 // Pointer to data
                                                    cols,                       // Width of bitmap
                                                    rows,                       // Height of bitmap
                                                    8,                          // Bits per component
                                                    cvMat.step[0],              // Bytes per row
                                                    colorSpace,                 // Colorspace
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);
    // Note: colorSpace comes from CGImageGetColorSpace (a "Get" function), so it is
    // not owned here and must not be released.

    return cvMat;
}

Another method:

+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];
    CGColorSpaceRef colorSpace;

    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                                    // width
                                        cvMat.rows,                                    // height
                                        8,                                             // bits per component
                                        8 * cvMat.elemSize(),                          // bits per pixel
                                        cvMat.step[0],                                 // bytes per row
                                        colorSpace,                                    // colorspace
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault, // bitmap info
                                        provider,                                      // CGDataProviderRef
                                        NULL,                                          // decode
                                        false,                                         // should interpolate
                                        kCGRenderingIntentDefault);                    // intent

    // Getting UIImage from CGImage
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return finalImage;
}

Another method:

+ (cv::vector<cv::Rect>)findFeature:(UIImage *)image minsize:(cv::Size)minSize withCascade:(CascadeClassifier)cascade
{
    vector<cv::Rect> faces;
    Mat frame_gray;
    Mat imageMat = [ImageUtils cvMatFromUIImage:image];

    cvtColor(imageMat, frame_gray, CV_BGRA2GRAY);
    equalizeHist(frame_gray, frame_gray);

    cascade.detectMultiScale(frame_gray, faces, 1.1, 2, 0 | CV_HAAR_SCALE_IMAGE, minSize);
    frame_gray.release();
    imageMat.release();

    return faces;
}

2 Answers:

Answer 0 (score: 0)

It is because your UIImage resolution is far too high. You have to find a way to reduce its size.
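For instance, a minimal sketch of what the cell code could load instead of fullResolutionImage, reusing the asset and representation variables from the question (only the variable names are the question's; the rest is one possible approach): fullScreenImage is decoded at roughly screen resolution and aspectRatioThumbnail at grid-thumbnail size, so both are far cheaper to keep in memory.

// Sketch only: load a smaller representation instead of fullResolutionImage.
ALAssetRepresentation *representation = [asset defaultRepresentation];

// fullScreenImage is already decoded at (roughly) screen size and is already
// rotated to the correct orientation, hence UIImageOrientationUp here.
UIImage *screenSized = [UIImage imageWithCGImage:[representation fullScreenImage]
                                           scale:[representation scale]
                                     orientation:UIImageOrientationUp];

// For a grid cell, the asset's thumbnail is usually enough:
UIImage *thumb = [UIImage imageWithCGImage:[asset aspectRatioThumbnail]];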

Answer 1 (score: 0)

Use dequeueReusableCellWithReuseIdentifier when creating your collection view cells.

Also resize the images you are processing; that will definitely reduce the memory they take.
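A rough sketch of both suggestions (the reuse identifier @"GalleryCell", the helper name resizedImage:toSize: and the target size are placeholders, not part of the question's code):

// In cellForItemAtIndexPath: reuse cells instead of building new ones each time.
UICollectionViewCell *cell =
    [collectionView dequeueReusableCellWithReuseIdentifier:@"GalleryCell"
                                              forIndexPath:indexPath];

// Helper that downscales a UIImage before it is passed to findFeature / cvMatFromUIImage,
// so the OpenCV Mat copies also stay small.
+ (UIImage *)resizedImage:(UIImage *)image toSize:(CGSize)targetSize
{
    UIGraphicsBeginImageContextWithOptions(targetSize, YES, 0.0); // opaque, device scale
    [image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
    UIImage *resized = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return resized;
}

The downscaled copy only needs to be big enough for the face detector to work on; the full-resolution asset never has to be decoded at all.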