I am working on an application where I have to load images from a server.
I am trying to load app screenshots from App Store links. I do get the images, but they are not as sharp and clear as they should be. I fetch the images in the background and everything works fine, yet the resulting images look a bit blurry. I am testing on a Retina display. Does anyone know why this happens? Any solution would help.
Thanks.
Here is my image loading code:
// This will create the imageview with the required frame & use the url to load the image
-(void)loadAppsScreenShots:(int)i Frame:(CGRect)frame withImageUrl:(NSString *)urlStr
{
    UIImageView *appImageView = [[UIImageView alloc] init];
    frame.origin.x = 0;
    appImageView.frame = frame;
    appImageView.tag = i;
    sharedImageCache = [ImageCache sharedImageCacheInstance];
    UIImage *image1 = [sharedImageCache getCachedImage:[NSString stringWithFormat:@"%@",urlStr]];
    if (image1 == nil)
    {
        // Show indicator till image loads
        UIActivityIndicatorView *indiView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhite];
        indiView.center = CGPointMake(appImageView.frame.size.width/2, appImageView.frame.size.height/2);
        [appImageView addSubview:indiView];
        [indiView startAnimating];
        indiView.hidden = FALSE;
        // Show label indicating image loading process
        UILabel *loadingLbl = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 200, 25)];
        loadingLbl.text = @"";//@"Please wait...";
        loadingLbl.center = CGPointMake(appImageView.frame.size.width/2 + 5, appImageView.frame.size.height/2 + 23);
        loadingLbl.font = [UIFont fontWithName:@"Helvetica-Bold" size:15.0f];
        loadingLbl.textAlignment = UITextAlignmentCenter;
        loadingLbl.backgroundColor = [UIColor clearColor];
        loadingLbl.textColor = [UIColor whiteColor];
        [appImageView addSubview:loadingLbl];
        [appImageView sendSubviewToBack:loadingLbl];
        loadingLbl.hidden = FALSE;
        // Dictionary to collect all objects & pass them to the method where we load the data
        NSMutableDictionary *dict = [[NSMutableDictionary alloc] init];
        [dict setObject:appImageView forKey:@"imageView"];
        if (urlStr != nil) {
            [dict setObject:urlStr forKey:@"url"];
        }
        [dict setObject:indiView forKey:@"indi"];
        [dict setObject:loadingLbl forKey:@"loadingLbl"];
        [self performSelectorInBackground:@selector(loadImageFromURLAndSaveInDocDir:) withObject:dict];
    }
    else
    {
        appImageView.image = image1;
    }
    [[appView viewWithTag:i] addSubview:appImageView];
    [appView bringSubviewToFront:appImageView];
    appImageView.contentMode = UIViewContentModeScaleAspectFit;
    appImageView = nil;
}

-(void)loadImageFromURLAndSaveInDocDir:(NSMutableDictionary *)dict
{
    @autoreleasepool
    {
        UIImageView *cellImageViewObj = [dict objectForKey:@"imageView"];
        NSString *url;
        UIActivityIndicatorView *indiview = [dict objectForKey:@"indi"];
        UILabel *Lbl = [dict objectForKey:@"loadingLbl"];
        if ([dict objectForKey:@"url"])
        {
            url = [dict objectForKey:@"url"];
            // Fetch the data
            NSURL *imgURL = [NSURL URLWithString:url];
            NSData *imgData = [NSData dataWithContentsOfURL:imgURL];
            NSString *filename = [Utils getFileNameFromURL:url];
            // Cache the image
            [sharedImageCache cacheImage:[NSString stringWithFormat:@"%@",filename] :imgData];
            UIImage *image1 = [[UIImage alloc] initWithData:imgData];
            cellImageViewObj.image = image1;
            image1 = nil;
        }
        else {
            url = @"";
        }
        // Set the content mode & hide the indicator & label
        cellImageViewObj.contentMode = UIViewContentModeScaleAspectFit;
        [indiview stopAnimating];
        indiview.hidden = TRUE;
        Lbl.hidden = TRUE;
        dict = nil;
    }
}
What am I doing wrong?
Answer 0 (score: 0)
The problem is that you are displaying the image at its natural size. On a Retina device you need an image that is twice as wide and twice as tall as the view it is drawn in.
Suppose the image is 200x200 and you are going to show it in a 100x100 view. The correct approach is:
Get a CGImageRef from the image data.
Create the UIImage from that CGImageRef with a scale of 2 (Retina); see the sketch below.
The result is an image whose size is 100x100 but whose scale is 2.
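A minimal sketch of those two steps, assuming imgData is the NSData downloaded in the question's loadImageFromURLAndSaveInDocDir: (this is not the answerer's exact code):

// Wrap the downloaded bitmap in a UIImage with scale 2, so a 200x200-pixel
// screenshot reports a 100x100-point size and maps 1:1 onto Retina pixels.
UIImage *rawImage = [UIImage imageWithData:imgData];
UIImage *retinaImage = [UIImage imageWithCGImage:rawImage.CGImage
                                           scale:2.0
                                     orientation:UIImageOrientationUp];
cellImageViewObj.image = retinaImage;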
That said, since you specified UIViewContentModeScaleAspectFit, you could also just hand the 200x200 image to the UIImageView as-is, but in that case you have to force the imageView's frame.size to 100x100.
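A short sketch of that second option (the 100x100 frame is just the example size used above):

// Keep the view at 100x100 points and let aspect-fit do the fitting;
// on Retina the 200x200-pixel image then fills the view's 200x200 pixels.
appImageView.frame = CGRectMake(0, 0, 100, 100);
appImageView.contentMode = UIViewContentModeScaleAspectFit;
appImageView.image = [UIImage imageWithData:imgData];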
Answer 1 (score: 0)
If the image size differs from the image view's size, you can scale the server image proportionally to fit the view:
- (UIImage *)imageByScalingProportionallyToSize:(CGSize)targetSize;

- (UIImage *)imageByScalingProportionallyToSize:(CGSize)targetSize {
    UIImage *sourceImage = self;
    UIImage *newImage = nil;
    CGSize imageSize = sourceImage.size;
    CGFloat width = imageSize.width;
    CGFloat height = imageSize.height;
    CGFloat targetWidth = targetSize.width;
    CGFloat targetHeight = targetSize.height;
    CGFloat scaleFactor = 0.0;
    CGFloat scaledWidth = targetWidth;
    CGFloat scaledHeight = targetHeight;
    CGPoint thumbnailPoint = CGPointMake(0.0, 0.0);
    if (CGSizeEqualToSize(imageSize, targetSize) == NO) {
        CGFloat widthFactor = targetWidth / width;
        CGFloat heightFactor = targetHeight / height;
        if (widthFactor < heightFactor)
            scaleFactor = widthFactor;
        else
            scaleFactor = heightFactor;
        scaledWidth = width * scaleFactor;
        scaledHeight = height * scaleFactor;
        // center the image
        if (widthFactor < heightFactor) {
            thumbnailPoint.y = (targetHeight - scaledHeight) * 0.5;
        } else if (widthFactor > heightFactor) {
            thumbnailPoint.x = (targetWidth - scaledWidth) * 0.5;
        }
    }
    // this is actually the interesting part:
    UIGraphicsBeginImageContext(targetSize);
    CGRect thumbnailRect = CGRectZero;
    thumbnailRect.origin = thumbnailPoint;
    thumbnailRect.size.width = scaledWidth;
    thumbnailRect.size.height = scaledHeight;
    [sourceImage drawInRect:thumbnailRect];
    newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    if (newImage == nil) NSLog(@"could not scale image");
    return newImage;
}
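A usage sketch, assuming the method above is declared in a UIImage category and reusing the question's variable names (the 100x100 target size is only illustrative):

// Download, scale proportionally to the view's point size, then display.
UIImage *downloaded = [UIImage imageWithData:imgData];
UIImage *fitted = [downloaded imageByScalingProportionallyToSize:CGSizeMake(100, 100)];
cellImageViewObj.image = fitted;

Note that UIGraphicsBeginImageContext always renders at a scale of 1.0; if the scaled result still looks soft on a Retina display, swapping it for UIGraphicsBeginImageContextWithOptions(targetSize, NO, 0.0) makes the bitmap context use the screen's scale.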