I'm trying to automatically display the useful part of a mostly transparent PNG in an iPhone app. The image might be, say, 500x500, but it is mostly transparent. Somewhere within that image is a non-transparent region that I want to show to the user as large as possible, so I want to trim off as much as I can from each side (or make it appear that way by stretching and repositioning the image inside a UIImageView). Any ideas?
Answer 0 (score: 1)
Use Quartz to get at the image's bitmap data and examine the alpha channel to find the bounds of the non-transparent part of the image.
Here is the relevant Apple Tech Note: Getting the pixel data from a CGImage object. You can get the CGImage from a UIImage like this:
CGImageRef imageRef = [uiImage CGImage];
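A minimal sketch of that idea, assuming an 8-bit RGBA bitmap layout and a hypothetical helper name (findOpaqueBounds is not from the Tech Note): draw the CGImage into a bitmap context you own, then walk the alpha bytes to find the bounding box of the non-transparent pixels.

#import <UIKit/UIKit.h>

// Returns the bounding box (in pixels) of all pixels whose alpha is non-zero.
// The helper name and bitmap layout are assumptions for illustration.
static CGRect findOpaqueBounds(UIImage *uiImage) {
    CGImageRef imageRef = [uiImage CGImage];
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);

    // 8 bits per component, 4 bytes (RGBA) per pixel
    UInt8 *data = calloc(width * height * 4, sizeof(UInt8));
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(data, width, height, 8, width * 4, colorSpace,
                                             kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextDrawImage(ctx, CGRectMake(0, 0, width, height), imageRef);

    size_t minX = width, minY = height, maxX = 0, maxY = 0;
    for (size_t y = 0; y < height; y++) {
        for (size_t x = 0; x < width; x++) {
            UInt8 alpha = data[(y * width + x) * 4 + 3];   // alpha is the 4th byte of each pixel
            if (alpha > 0) {
                if (x < minX) minX = x;
                if (x > maxX) maxX = x;
                if (y < minY) minY = y;
                if (y > maxY) maxY = y;
            }
        }
    }

    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(data);

    if (maxX < minX || maxY < minY) return CGRectZero;   // fully transparent image
    return CGRectMake(minX, minY, maxX - minX + 1, maxY - minY + 1);
}

You can then crop to that rectangle with CGImageCreateWithImageInRect, or scale and offset a UIImageView so that rectangle fills the view.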
Answer 1 (score: 0)
I wrote a method to do this: it scans every pixel in the image to find, from each edge, the first row or column that contains a non-transparent pixel (using a 0.01 alpha tolerance), and then trims the image accordingly.
/// Crops an image by trimming off its transparent edges.
- (UIImage *)trimImage:(UIImage *)originalImage {
    // raw image reference
    CGImageRef rawImage = originalImage.CGImage;

    // image attributes (in pixels)
    size_t width = CGImageGetWidth(rawImage);
    size_t height = CGImageGetHeight(rawImage);
    CGRect rect = {CGPointZero, {width, height}};

    // image format: 8 bits per component, 4 bytes (RGBA) per pixel
    size_t bitsPerComponent = 8;
    size_t bytesPerRow = width * 4;
    CGBitmapInfo bitmapInfo = kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big;

    // data pointer – stores the pixel components as (r0, g0, b0, a0, r1, g1, b1, a1, ..., rn, gn, bn, an)
    UInt8 *data = calloc(bytesPerRow, height);

    // create an RGB bitmap context and draw the image into it (populating the data array)
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(data, width, height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo);
    CGContextDrawImage(ctx, rect, rawImage);
    float topTrim = 0;
    float bottomTrim = 0;
    float leftTrim = 0;
    float rightTrim = 0;

    @autoreleasepool {
        NSInteger pixelPosition = 0;

        // scan rows from the top down until one contains a non-transparent pixel
        NSInteger row = 0;
        NSInteger column = 0;
        BOOL found = NO;
        while (row < height) {
            while (column < width) {
                pixelPosition = row * width + column;
                NSInteger pixelIndex = 4 * pixelPosition;
                float alphaValue = data[pixelIndex + 3] / 255.0f;
                if (alphaValue > 0.01f) {
                    found = YES;
                    break;
                }
                column++;
            }
            if (found) {
                break;
            }
            column = 0;
            row++;
        }
        topTrim = row;
        // scan rows from the bottom up
        row = height - 1;
        column = 0;
        found = NO;
        while (row > 0) {
            while (column < width) {
                pixelPosition = row * width + column;
                NSInteger pixelIndex = 4 * pixelPosition;
                float alphaValue = data[pixelIndex + 3] / 255.0f;
                if (alphaValue > 0.01f) {
                    found = YES;
                    break;
                }
                column++;
            }
            if (found) {
                break;
            }
            column = 0;
            row--;
        }
        bottomTrim = row;
        // scan columns from the left in
        row = 0;
        column = 0;
        found = NO;
        while (column < width) {
            while (row < height) {
                pixelPosition = row * width + column;
                NSInteger pixelIndex = 4 * pixelPosition;
                float alphaValue = data[pixelIndex + 3] / 255.0f;
                if (alphaValue > 0.01f) {
                    found = YES;
                    break;
                }
                row++;
            }
            if (found) {
                break;
            }
            row = 0;
            column++;
        }
        leftTrim = column;
        // scan columns from the right in
        row = 0;
        column = width - 1;
        found = NO;
        while (column > 0) {
            while (row < height) {
                pixelPosition = row * width + column;
                NSInteger pixelIndex = 4 * pixelPosition;
                float alphaValue = data[pixelIndex + 3] / 255.0f;
                if (alphaValue > 0.01f) {
                    found = YES;
                    break;
                }
                row++;
            }
            if (found) {
                break;
            }
            row = 0;
            column--;
        }
        rightTrim = column;
    }
    // clean up
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(data);

    // note: the trim values and width/height are in pixels; they match points only for @1x images
    float trimWidth = rightTrim - leftTrim;
    float trimHeight = bottomTrim - topTrim;

    // build a canvas the size of the non-transparent region and put the full image inside it
    UIView *trimCanvas = [[UIView alloc] initWithFrame:CGRectMake(0, 0, trimWidth, trimHeight)];
    trimCanvas.backgroundColor = [UIColor clearColor];

    UIImageView *trimImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, width, height)];
    trimImageView.image = originalImage;
    trimImageView.contentMode = UIViewContentModeScaleToFill;
    trimImageView.backgroundColor = [UIColor clearColor];
    [trimCanvas addSubview:trimImageView];

    // shift the image view up and left so the non-transparent region fills the canvas
    trimImageView.center = CGPointMake(trimImageView.center.x - leftTrim, trimImageView.center.y - topTrim);

    // render the canvas into a new image
    CGRect canvasRect = [trimCanvas bounds];
    UIGraphicsBeginImageContextWithOptions(canvasRect.size, NO, originalImage.scale);
    CGContextRef renderContext = UIGraphicsGetCurrentContext();
    [trimCanvas.layer renderInContext:renderContext];
    UIImage *trimmedImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return trimmedImage;
}
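The returned image can then be shown as large as possible with an aspect-fit image view. A hypothetical call site (containerView stands in for whatever view should display the result, and the call assumes trimImage: is defined on the current class):

UIImage *trimmed = [self trimImage:originalImage];

UIImageView *imageView = [[UIImageView alloc] initWithFrame:containerView.bounds];
imageView.contentMode = UIViewContentModeScaleAspectFit;   // scales the trimmed content up as far as it fits
imageView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
imageView.image = trimmed;
[containerView addSubview:imageView];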