Having hit up a dozen other sites (and this one) looking for a direct answer to my problem, I'm at my wits' end on this one...
To start with, what I'm trying to figure out is a way, given a starting image created via [UIImage imageNamed:], to add decorations to it over time as events are processed. (Think heat map.) I realized early on that I would need to copy the image, since anything that comes through imageNamed: lives in a special cache, so I'm copying the raw data and creating a new image and context through the underlying Core Graphics API. While I've managed to copy the image and attach it to a UIView, I can't figure out how to change its contents afterwards... Here's what I'm currently doing in my code...
@interface HeatMapTestViewController : UIViewController {
    CFDataRef imageData;
    CGColorSpaceRef colorSpace;
    CGContextRef context;
    CGColorRef paintColor;
    CGImageRef image;
}
- (IBAction) imageButtonTapped:(UIButton*)sender forEvent:(UIEvent*)event;
@end

@implementation HeatMapTestViewController

- (void) viewDidLoad {
    [super viewDidLoad];

    UIImage* backImage = [UIImage imageNamed:@"Grey Checkerboard.png"];
    UIColor* backdrop = [UIColor colorWithPatternImage:backImage];
    [[self view] setBackgroundColor:backdrop];

    UIImage* startImage = [UIImage imageNamed:@"Starting Image.png"];
    CGImageRef pixelmap = [startImage CGImage];

    // http://www.iphonedevsdk.com/forum/iphone-sdk-development/34247-cgimage-pixel-array.html
    CGDataProviderRef dataProvider = CGImageGetDataProvider(pixelmap);
    imageData = CGDataProviderCopyData(dataProvider);
    void* rawPixels = (void*)CFDataGetBytePtr(imageData);

    colorSpace = CGColorSpaceCreateDeviceRGB();

    // http://developer.apple.com/mac/library/qa/qa2001/qa1037.html
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(pixelmap);
    if ( ( bitmapInfo & kCGBitmapAlphaInfoMask ) == kCGImageAlphaLast )
        bitmapInfo = ( bitmapInfo & kCGBitmapByteOrderMask ) | kCGImageAlphaPremultipliedLast;
    if ( ( bitmapInfo & kCGBitmapAlphaInfoMask ) == kCGImageAlphaFirst )
        bitmapInfo = ( bitmapInfo & kCGBitmapByteOrderMask ) | kCGImageAlphaPremultipliedFirst;

    context = CGBitmapContextCreate(
        rawPixels,
        CGImageGetWidth(pixelmap),
        CGImageGetHeight(pixelmap),
        CGImageGetBitsPerComponent(pixelmap),
        CGImageGetBytesPerRow(pixelmap),
        colorSpace,
        bitmapInfo
    );

    CGFloat components[] = {1.0, 0.0, 0.0, 1.0};
    paintColor = CGColorCreate(colorSpace, components);

    image = CGBitmapContextCreateImage(context);
    UIImage* newImage = [UIImage imageWithCGImage:image];

    UIButton* button = (id)[[self view] viewWithTag:327];
    [button setBackgroundImage:newImage forState:UIControlStateNormal];
}

- (IBAction) imageButtonTapped:(UIButton*)sender forEvent:(UIEvent*)event {
    UITouch* touch;
    CGPoint touchPoint;

    touch = [[event touchesForView:sender] anyObject];
    // Assuming we always just get one due to IB setting...
    touchPoint = [touch locationInView:sender];

    CGRect touchRect = CGRectZero;
    touchRect.origin = touchPoint;
    touchRect = CGRectInset(touchRect, -11.0, -11.0);

    UIGraphicsPushContext(context);
    CGContextSetFillColorWithColor(context, paintColor);
    CGContextFillEllipseInRect(context, touchRect);
    UIGraphicsPopContext();

    UIImage* newImage = [UIImage imageWithCGImage:image];
    [sender setBackgroundImage:newImage forState:UIControlStateNormal];
    [sender setNeedsDisplayInRect:touchRect];
} // imageButtonTapped:forEvent:

@end
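A note on why the approach above never shows the painted circles: CGBitmapContextCreateImage captures a copy of the context's pixels at the moment it is called, so drawing into the context later does not affect the CGImage (or the UIImage wrapping it) that was created back in viewDidLoad. There is also the issue that the bytes behind CFDataGetBytePtr are const, the CFData from CGDataProviderCopyData being immutable, so building a writable bitmap context over them is not guaranteed to be safe. A minimal sketch of the re-snapshot the tap handler would need, reusing the context and image ivars from the code above:

// After painting into the bitmap context, throw away the stale snapshot
// and take a fresh one so UIKit actually sees the updated pixels.
CGImageRelease(image);
image = CGBitmapContextCreateImage(context);
UIImage* refreshed = [UIImage imageWithCGImage:image];
[sender setBackgroundImage:refreshed forState:UIControlStateNormal];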
The live tap feedback is only there for demonstration purposes... what I actually plan to do in the final product is show the modified image, on the user's request, behind whatever screen he/she is currently looking at. My Google-fu keeps failing me, and I can't seem to find any sample code for this in Apple's documentation. I even tried to track down source for one of the iPhone "painting" apps, since that kind of app seems to share the functionality I'm trying to accomplish. No dice. /-:
EDIT: After more experimentation and research, I've switched to a different approach to get something like this working...

@interface HeatMapTestViewController : UIViewController {
    CGColorSpaceRef colorSpace;
    CGColorRef paintColor;
}
- (IBAction) imageButtonTapped:(UIButton*)sender forEvent:(UIEvent*)event;
@end

@implementation HeatMapTestViewController

- (void) viewDidLoad {
    [super viewDidLoad];

    UIImage* backImage = [UIImage imageNamed:@"Grey Checkerboard.png"];
    UIColor* backdrop = [UIColor colorWithPatternImage:backImage];
    [[self view] setBackgroundColor:backdrop];

    UIImage* startImage = [UIImage imageNamed:@"Starting Image.png"];
    UIButton* button = (id)[[self view] viewWithTag:327];
    [button setBackgroundImage:startImage forState:UIControlStateNormal];

    colorSpace = CGColorSpaceCreateDeviceRGB();
    CGFloat components[] = {1.0, 0.0, 0.0, 1.0};
    paintColor = CGColorCreate(colorSpace, components);
}

- (IBAction) imageButtonTapped:(UIButton*)sender forEvent:(UIEvent*)event {
    UITouch* touch;
    CGPoint touchPoint;

    touch = [[event touchesForView:sender] anyObject];
    // Assuming we always just get one due to IB setting...
    touchPoint = [touch locationInView:sender];

    CGRect touchRect = CGRectZero;
    touchRect.origin = touchPoint;
    touchRect = CGRectInset(touchRect, -11.0, -11.0);

    UIImage* image = [sender backgroundImageForState:UIControlStateNormal];

    // http://www.ipodtouchfans.com/forums/showthread.php?t=132024
    UIGraphicsBeginImageContext([image size]);
    CGContextRef context = UIGraphicsGetCurrentContext();

    [image drawInRect:CGRectMake(0.0, 0.0, [image size].width, [image size].height)];
    // This repaints the entire image! Boo hiss!

    CGContextSetFillColorWithColor(context, paintColor);
    CGContextFillEllipseInRect(context, touchRect);

    [sender setBackgroundImage:UIGraphicsGetImageFromCurrentImageContext()
                      forState:UIControlStateNormal];
    UIGraphicsEndImageContext();
} // imageButtonTapped:forEvent:

@end
The comments pointed out a significant problem with this approach: if I understand the above correctly, I'm redrawing the entire contents of the image into a new one just to update a small piece of it. That doesn't sound like something I can get away with 20-30 times a second.
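As an aside, one way to avoid pushing a freshly wrapped copy of the whole image through UIKit on every touch is to keep the bitmap context around and blit from it in a custom view's drawRect:, invalidating only the touched rect; Quartz clips drawing to the dirty region, so only that patch actually gets re-rendered. The sketch below is just an illustration of that idea (HeatMapView and its heatMapContext property are hypothetical, not part of the code above):

// Hypothetical view that displays the contents of a persistent bitmap context.
@interface HeatMapView : UIView {
    CGContextRef heatMapContext;   // owned by the controller, not retained here
}
@property (nonatomic, assign) CGContextRef heatMapContext;
@end

@implementation HeatMapView
@synthesize heatMapContext;

- (void) drawRect:(CGRect)rect {
    if (heatMapContext == NULL) return;
    // Snapshot the bitmap and draw it; drawing is clipped to the dirty rect,
    // so after setNeedsDisplayInRect: only the invalidated area is redrawn.
    CGImageRef snapshot = CGBitmapContextCreateImage(heatMapContext);
    UIImage* image = [UIImage imageWithCGImage:snapshot];
    CGImageRelease(snapshot);
    [image drawInRect:[self bounds]];
}
@end

The touch handler would then paint its ellipse into heatMapContext as before and call setNeedsDisplayInRect: on the view with just the touched rect.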
Answer 0 (score: 1)
Here's what I eventually got working... though I'm still not convinced it's the right way to go about it...
@interface HeatMapTestViewController : UIViewController {
    void* imageData;
    CGContextRef context;
}
- (IBAction) imageButtonTapped:(UIButton*)sender forEvent:(UIEvent*)event;
@end

@implementation HeatMapTestViewController

- (void) viewDidLoad {
    [super viewDidLoad];

    UIImage* backImage = [UIImage imageNamed:@"Grey Checkerboard.png"];
    UIColor* backdrop = [UIColor colorWithPatternImage:backImage];
    [[self view] setBackgroundColor:backdrop];

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGFloat components[] = {1.0, 0.0, 0.0, 0.2};
    CGColorRef paintColor = CGColorCreate(colorSpace, components);

    UIImage* startImage = [UIImage imageNamed:@"Starting Image.png"];

    // Our own writable backing store: 4 bytes (RGBA) per pixel.
    imageData = NSZoneMalloc(NSDefaultMallocZone(),
                             [startImage size].width * [startImage size].height * 4);

    // http://developer.apple.com/mac/library/qa/qa2001/qa1037.html
    // Note: bytes-per-row must be width * 4, not height * 4; the two only
    // happen to coincide for a square image.
    context = CGBitmapContextCreate(
        imageData,
        [startImage size].width,
        [startImage size].height,
        8,
        [startImage size].width * 4,
        colorSpace,
        kCGImageAlphaPremultipliedLast
    );

    // Why is this needed? I'm wrapping the final product in a UIImage!
    // (UIKit drawing assumes a top-left origin, while a fresh CGBitmapContext
    // has a bottom-left origin; without this flip the image comes out upside down.)
    // http://developer.apple.com/iphone/library/documentation/GraphicsImaging/Conceptual/drawingwithquartz2d/dq_context/dq_context.html#//apple_ref/doc/uid/TP30001066-CH203-SW9
    CGContextTranslateCTM(context, 0.0, [startImage size].height);
    CGContextScaleCTM(context, 1.0, -1.0);

    // The fill colour becomes part of the context's persistent graphics state,
    // so the tap handler below never has to set it again.
    CGContextSetFillColorWithColor(context, paintColor);
    CGColorRelease(paintColor);
    CGColorSpaceRelease(colorSpace);

    UIGraphicsPushContext(context);
    [startImage drawInRect:CGRectMake(0.0, 0.0, [startImage size].width, [startImage size].height)];
    CGContextFlush(context);
    UIGraphicsPopContext();

    CGImageRef image = CGBitmapContextCreateImage(context); // Copy! Boo!
    UIImage* newImage = [UIImage imageWithCGImage:image];
    CGImageRelease(image);

    UIButton* button = (id)[[self view] viewWithTag:327];
    [button setBackgroundImage:newImage forState:UIControlStateNormal];
}

- (IBAction) imageButtonTapped:(UIButton*)sender forEvent:(UIEvent*)event {
    UITouch* touch;
    CGPoint touchPoint;

    touch = [[event touchesForView:sender] anyObject];
    // Assuming we always just get one due to IB setting...
    touchPoint = [touch locationInView:sender];

    CGRect touchRect = CGRectZero;
    touchRect.origin = touchPoint;
    touchRect = CGRectInset(touchRect, -11.0, -11.0);

    // Paint directly into the persistent bitmap context...
    UIGraphicsPushContext(context);
    CGContextFillEllipseInRect(context, touchRect);
    CGContextFlush(context);
    UIGraphicsPopContext();

    // ...then snapshot it again so UIKit sees the updated pixels.
    CGImageRef image = CGBitmapContextCreateImage(context); // Copy! Boo!
    UIImage* newImage = [UIImage imageWithCGImage:image];
    CGImageRelease(image);

    [sender setBackgroundImage:newImage forState:UIControlStateNormal];
} // imageButtonTapped:forEvent:

@end
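One thing the code above never does is tear down the bitmap context or free the malloc'd pixel buffer. Since this is manual-retain-release code, a minimal cleanup sketch (assuming nothing else holds onto them) might look like:

- (void) dealloc {
    // Release the persistent bitmap context and the buffer it draws into.
    if (context) CGContextRelease(context);
    if (imageData) NSZoneFree(NSDefaultMallocZone(), imageData);
    [super dealloc];
}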