我正在尝试在320x320 imageview中调整图像大小。我正在从相机捕捉图像并从画廊加载。我正在使用上面的代码来调整大小;
-(UIImage *)scaleAndRotateImage:(UIImage *)image max:(int)kMaxResolution
{
// NSLog(@"W=%f h=%f",self.m_initialImage.size.width,self.m_initialImage.size.height);
// int kMaxResolution = 430;
CGImageRef imgRef = image.CGImage;
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
}
else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
switch(orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
// CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
// UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
// UIGraphicsEndImageContext();
UIGraphicsBeginImageContextWithOptions(image.size, NO, 1.0);
[image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
UIImage *normalizedImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// NSLog(@"%f,%f",image.size.width, image.size.height);
// float ratioWidth=image.size.width/380;
// float imgWidth=image.size.width/ratioWidth;
// float tmp=image.size.height/568;
// float imgHeight=image.size.height/tmp;
if(image.size.width<image.size.height)
{
normalizedImage=[self compressMe:normalizedImage width:320 height:500];
}
else
{
// normalizedImage=[self compressMe:normalizedImage width:380 height:380];
normalizedImage=[self compressMe:normalizedImage width:340 height:320];
}
// Resize image
// UIGraphicsBeginImageContext(CGSizeMake(320, 426));
// [image drawInRect: CGRectMake(0, 0, 320, 426)];
// UIImage *smallImage = UIGraphicsGetImageFromCurrentImageContext();
// UIGraphicsEndImageContext();
//
// CGRect cropRect = CGRectMake(0, 55, 320, 320);
// CGImageRef imageRef = CGImageCreateWithImageInRect([smallImage CGImage], cropRect);
//
// UIImage *normalizedImage=[UIImage imageWithCGImage:imageRef];
//
// CGImageRelease(imageRef);
return normalizedImage;
}
-(UIImage*)compressMe:(UIImage*)image width:(float)width height:(float)height
{
NSLog(@"%f,%f",width,height);
// Get size of current image
CGSize size = [image size];
if( size.width == width
&& size.height == height){
return image;
}
CGSize newSize = CGSizeMake(width, height);
double ratio;
double delta;
CGPoint offset;
//make a new square size, that is the resized imaged width
CGSize sz = CGSizeMake(newSize.width, newSize.height);
//figure out if the picture is landscape or portrait, then
//calculate scale factor and offset
if (image.size.width > image.size.height) {
ratio = newSize.height / image.size.height;
delta = ratio*(image.size.width - image.size.height);
offset = CGPointMake(delta/2, 0);
sz.width=ratio*image.size.width;
} else {
ratio = newSize.width / image.size.width;
delta = ratio*(image.size.height - image.size.width);
offset = CGPointMake(0, delta/2);
}
NSLog(@"%f,%f,%f,%f",offset.x,offset.y,ratio * image.size.height,(ratio * image.size.width));
//make the final clipping rect based on the calculated values
// CGRect clipRect = CGRectMake(0, 0,
// (ratio * image.size.width-((offset.y)+offset.x)),
// ((ratio * image.size.height)-(offset.y)*2));
CGRect clipRect = CGRectMake(0,0,
(ratio * image.size.width),
(ratio * image.size.height));
// CGRect clipRect = CGRectMake(0,0,width,height);
CGSize szz = CGSizeMake((ratio * image.size.width),
(ratio * image.size.height));
//start a new context, with scale factor 0.0 so retina displays get
//high quality image
if ([[UIScreen mainScreen] respondsToSelector:@selector(scale)]) {
UIGraphicsBeginImageContextWithOptions(szz, YES, 0.0);
} else {
UIGraphicsBeginImageContext(szz);
}
UIRectClip(clipRect);
[image drawInRect:clipRect];
UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSLog(@"%f,%f",newImage.size.width,newImage.size.height);
return newImage;
}
图像没有正确调整大小,任何人都可以建议我在哪里缺乏。
由于
答案 0 :(得分:0)
抱歉,我没有阅读整个代码。
但是既然你试图将图像放在UIImageView中,为什么不使用“contentMode”属性让UIImageView适当调整图像大小以适合图像视图