The first time the game runs, I need to generate and save 320 images as PNGs. After that, these images will be loaded instead of being generated again. This is the process:
The generation is done with UIGraphicsBeginImageContextWithOptions. It needs to run on a background thread, tinting 32 image templates in each of 10 colours. These images will be used as avatar/profile pictures in the game, scaled down on certain screens where appropriate. They cannot be generated on the fly every time, because that causes too much of a delay.
The images are 400x400 each and come out at roughly 20-25 kB apiece when stored. When I try my current way of generating and storing them, I get a memory warning and I can see (using Instruments) that the number of live CGImage and UIImage objects keeps increasing. It looks as though they are being retained, even though I keep no references to them.
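For reference, loading the cached copies back could look something like this minimal sketch. It assumes the same "<templateName>_<colourIndex>.png" naming scheme used by the generation code further down; the cachedAvatarImageNamed:colourIndex: helper is hypothetical, not part of my project:
//Hypothetical helper: load a previously cached avatar instead of regenerating it
- (UIImage *)cachedAvatarImageNamed:(NSString *)templateName colourIndex:(int)colourIndex {
    NSString *cacheDir = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) lastObject];
    NSString *fileName = [NSString stringWithFormat:@"%@_%i.png", templateName, colourIndex];
    NSString *filePath = [cacheDir stringByAppendingPathComponent:fileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:filePath]) {
        //imageWithContentsOfFile: does not cache the image the way imageNamed: does
        return [UIImage imageWithContentsOfFile:filePath];
    }
    return nil; //not generated yet - caller falls back to generating it
}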
Here is another question of mine that goes into more detail about the code I am using: UIGraphicsBeginImageContext created image
What is the best way to create and store these images? Thanks in advance.
EDIT
Here is the full code I am currently using to create and save the images:
//==========================================================
// Definitions and Macros
//==========================================================
//HEX color macro
#define UIColorFromRGB(rgbValue) [UIColor \
colorWithRed:((float)((rgbValue & 0xFF0000) >> 16))/255.0 \
green:((float)((rgbValue & 0xFF00) >> 8))/255.0 \
blue:((float)(rgbValue & 0xFF))/255.0 alpha:1.0]
//Colours
#define RED_COLOUR UIColorFromRGB(0xF65D58)
#define ORANGE_COLOUR UIColorFromRGB(0xFF8D16)
#define YELLOW_COLOUR UIColorFromRGB(0xFFD100)
#define LIGHT_GREEN_COLOUR UIColorFromRGB(0x82DE13)
#define DARK_GREEN_COLOUR UIColorFromRGB(0x67B74F)
#define TURQUOISE_COLOUR UIColorFromRGB(0x32ADA6)
#define LIGHT_BLUE_COLOUR UIColorFromRGB(0x11C9FF)
#define DARK_BLUE_COLOUR UIColorFromRGB(0x2E97F5)
#define PURPLE_COLOUR UIColorFromRGB(0x8F73FD)
#define PINK_COLOUR UIColorFromRGB(0xF35991)
#import "ViewController.h"
@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    //Generate the graphics
    [self generateAndSaveGraphics];
}
//==========================================================
// Generating and Saving Graphics
//==========================================================
-(void)generateAndSaveGraphics {
    dispatch_async( dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [self createAvatarImages];
        //Here create all other images that need to be saved to Cache directory
        dispatch_async( dispatch_get_main_queue(), ^{ //Finished
            NSLog(@"DONE"); //always runs out of memory before getting here
        });
    });
}
-(void)createAvatarImages {
    //Create avatar images
    NSArray *colours = [NSArray arrayWithObjects:RED_COLOUR, ORANGE_COLOUR, YELLOW_COLOUR, LIGHT_GREEN_COLOUR, DARK_GREEN_COLOUR, TURQUOISE_COLOUR, LIGHT_BLUE_COLOUR, DARK_BLUE_COLOUR, PURPLE_COLOUR, PINK_COLOUR, nil];
    NSString *cacheDir = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) lastObject];
    for(int i = 0; i < 32; i++) { //Avatar image templates are named m1 - m16 and f1 - f16
        NSString *avatarImageName;
        if(i < 16) { //female avatars
            avatarImageName = [NSString stringWithFormat:@"f%i", i+1];
        }
        else { //male avatars
            avatarImageName = [NSString stringWithFormat:@"m%i", i-15];
        }
        for(int j = 0; j < colours.count; j++) { //make avatar image for each colour
            @autoreleasepool { //only helps very slightly
                UIColor *colour = [colours objectAtIndex:j];
                UIImage *avatarImage = [self tintedImageFromImage:[UIImage imageNamed:avatarImageName] colour:colour intensity:0.3];
                NSString *fileName = [NSString stringWithFormat:@"%@_%i.png", avatarImageName, j];
                NSString *filePath = [cacheDir stringByAppendingPathComponent:fileName];
                NSData *imageData = [NSData dataWithData:UIImagePNGRepresentation(avatarImage)];
                [imageData writeToFile:filePath atomically:YES];
                NSLog(@"AVATAR IMAGE CREATED");
            }
        }
    }
}
//==========================================================
// Universal Image Tinting Code
//==========================================================
//Creates a tinted image based on the source greyscale image and tinting intensity
-(UIImage *)tintedImageFromImage:(UIImage *)sourceImage colour:(UIColor *)color intensity:(float)intensity {
    if (UIGraphicsBeginImageContextWithOptions != NULL) {
        UIGraphicsBeginImageContextWithOptions(sourceImage.size, NO, 0.0);
    } else {
        UIGraphicsBeginImageContext(sourceImage.size);
    }
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGRect rect = CGRectMake(0, 0, sourceImage.size.width, sourceImage.size.height);
    // draw alpha-mask
    CGContextSetBlendMode(context, kCGBlendModeNormal);
    CGContextDrawImage(context, rect, sourceImage.CGImage);
    // draw tint color, preserving alpha values of original image
    CGContextSetBlendMode(context, kCGBlendModeSourceIn);
    [color setFill];
    CGContextFillRect(context, rect);
    //Set the original greyscale template as the overlay of the new image
    sourceImage = [self verticallyFlipImage:sourceImage];
    [sourceImage drawInRect:CGRectMake(0,0, sourceImage.size.width,sourceImage.size.height) blendMode:kCGBlendModeMultiply alpha:intensity];
    UIImage *colouredImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    colouredImage = [self verticallyFlipImage:colouredImage];
    return colouredImage;
}
//Vertically flips an image
-(UIImage *)verticallyFlipImage:(UIImage *)originalImage {
    UIImageView *tempImageView = [[UIImageView alloc] initWithImage:originalImage];
    UIGraphicsBeginImageContext(tempImageView.frame.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, tempImageView.frame.size.height);
    CGContextConcatCTM(context, flipVertical);
    [tempImageView.layer renderInContext:context];
    UIImage *flippedImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return flippedImage;
}
@end
I have created a test project (in a zip) to illustrate the problem:
For future reference, the solution was just this one line of code:
tempImageView.image = nil;
Thanks to Matic.
Answer 0 (score: 0)
It seems the problem lies in the method verticallyFlipImage. The graphics context appears to retain the temporary image view you create, and with it the image you assign to it. This is usually solved by pushing each image through the process as its own dispatch call: resample the image -> callback -> resample the next one (or finish).
At the end of the whole resampling run, all the data is released and there is no memory leak. As a quick fix, simply call tempImageView.image = nil; before returning the image. The image view itself still produces some memory overhead, but it is far too small to have any real impact.
This works for me; I hope it helps you too.
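Applied to the verticallyFlipImage method from the question, the quick fix looks like this (a sketch that simply combines the original method with the suggested line):
-(UIImage *)verticallyFlipImage:(UIImage *)originalImage {
    UIImageView *tempImageView = [[UIImageView alloc] initWithImage:originalImage];
    UIGraphicsBeginImageContext(tempImageView.frame.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, tempImageView.frame.size.height);
    CGContextConcatCTM(context, flipVertical);
    [tempImageView.layer renderInContext:context];
    UIImage *flippedImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    tempImageView.image = nil; //quick fix: drop the image so the image view no longer keeps it alive
    return flippedImage;
}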
EDIT: added the dispatch concept (referenced in the comments)
dispatch_queue_t internalQueue;

- (void)createQueue {
    dispatch_sync(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^(void) {
        internalQueue = dispatch_queue_create("myQueue", DISPATCH_QUEUE_SERIAL); //we created a high priority queue
    });
}

- (void)deleteQueue {
    dispatch_release(internalQueue);
}

- (void)imageProcessingDone {
    [self deleteQueue];
    //all done here
}

- (void)processImagesInArray:(NSMutableArray *)imageArray {
    //take out 1 of the objects (last in this case, you can do objectAtIndex:0 if you wish)
    UIImage *img = [[imageArray lastObject] retain]; //note, image retained so the next line does not deallocate it (released at NOTE1)
    [imageArray removeLastObject]; //remove from the array
    dispatch_async(internalQueue, ^(void) { //dispatch
        //do all the image processing + saving
        [img release]; //NOTE1
        //callback: in this case I push it to the main thread. There should be little difference if you simply dispatch it again on the internalQueue
        if(imageArray.count > 0) {
            [self performSelectorOnMainThread:@selector(processImagesInArray:) withObject:imageArray waitUntilDone:NO];
        }
        else {
            [self performSelectorOnMainThread:@selector(imageProcessingDone) withObject:nil waitUntilDone:NO];
        }
    });
}
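For completeness, here is a hypothetical way this could be wired up; it is not part of the original answer. The startAvatarProcessing method is my own, and the f1-f16/m1-m16 template names are taken from the question. Create the queue once, fill a mutable array with the work items, then start the chain; imageProcessingDone fires when the array is empty.
//Hypothetical kick-off, assuming the methods above and the question's template images
- (void)startAvatarProcessing {
    [self createQueue];
    NSMutableArray *images = [NSMutableArray array];
    for (int i = 1; i <= 16; i++) {
        UIImage *femaleTemplate = [UIImage imageNamed:[NSString stringWithFormat:@"f%i", i]];
        if (femaleTemplate) [images addObject:femaleTemplate];
        UIImage *maleTemplate = [UIImage imageNamed:[NSString stringWithFormat:@"m%i", i]];
        if (maleTemplate) [images addObject:maleTemplate];
    }
    [self processImagesInArray:images]; //each image is then processed and saved one dispatch at a time
}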