感谢您的光临。
这是我的代码
// Take the filtered output and display it in the image view.
CIImage *result = _vignette.outputImage;
// Clear the previously-displayed image first.
self.mainImageView.image = nil;
//self.mainImageView.contentMode = UIViewContentModeScaleAspectFit;
// NOTE(review): imageWithCIImage: yields a UIImage with no backing CGImage
// bitmap, so the layer's contentMode handling does not apply normally —
// this is the root cause of the stretching described below.
self.mainImageView.image = [UIImage imageWithCIImage:result];
self.mainImageView.contentMode = UIViewContentModeScaleAspectFit;
在这里_vignette
正确设置过滤器,图像效果正确应用于图像。
我正在使用分辨率为500x375的源图像。我的imageView几乎拥有iPhone屏幕的分辨率。所以为了避免拉伸,我正在使用AspectFit。
但是在应用效果并把结果图像赋值回 imageView 之后，图像被拉伸了。无论我设置哪种 UIViewContentMode 都不起作用——不管我应用什么过滤器，它似乎总是以 ScaleToFill 的方式显示。
知道为什么会这样吗?任何建议都受到高度赞赏。
答案 0（得分：21）
(1)Aspect Fit 拉伸图像 - 以适应。如果您根本不想拉伸图像,请使用中心(例如)。
(2)imageWithCIImage
给你一个非常奇怪的野兽,一个不基于CGImage的UIImage,因此不受普通的图层显示规则的影响。它实际上只不过是CIImage的薄包装,这不是你想要的。你必须通过CGImage将CIFilter输出转换(渲染)到UIImage,从而为你提供一个实际上有一些位的UIImage(CGImage,一个位图)。我在这里的讨论为您提供了演示代码:
http://www.apeth.com/iOSBook/ch15.html#_cifilter_and_ciimage
换句话说,在某些时候,您必须调用CIContext createCGImage:fromRect:
从CIFilter的输出生成CGImageRef,并将其传递给UIImage。在您这样做之前,您没有将过滤器操作的输出作为真正的UIImage。
或者,您可以将图像从imageWithCIImage
绘制到图形上下文中。例如,您可以将其绘制到图像图形上下文中,然后使用那个图像。
无法做的是直接显示imageWithCIImage
的图像。那是因为它不是图像!它没有底层位图(CGImage)。那里没有。它只是一组用于导出图像的CIFilter指令。
答案 1（得分：1）
我只是花了一整天时间。我遇到定向问题,然后输出质量差。
使用相机拍摄图像后,我会这样做。
// Convert the captured still-image sample buffer to JPEG data, then to a UIImage.
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
// NOTE(review): initWithData: carries the camera's EXIF orientation in
// image.imageOrientation — presumably why the rotation fix-up below is needed.
UIImage *image = [[UIImage alloc] initWithData:imageData];
这会导致旋转问题,所以我需要这样做。
// Redraws `image` so its pixels are physically rotated/flipped to match its
// imageOrientation (EXIF tag), optionally scaling down so neither side exceeds
// kMaxResolution. The returned image is bitmap-backed with orientation Up,
// making it safe for pipelines that ignore UIImage orientation metadata.
UIImage *scaleAndRotateImage(UIImage *image)
{
// Size cap for the output; here the image's own height is used as the limit.
int kMaxResolution = image.size.height; // Or whatever
CGImageRef imgRef = image.CGImage;
// Raw bitmap dimensions (before any orientation is applied).
CGFloat width = CGImageGetWidth(imgRef);
CGFloat height = CGImageGetHeight(imgRef);
CGAffineTransform transform = CGAffineTransformIdentity;
CGRect bounds = CGRectMake(0, 0, width, height);
// Shrink the target bounds proportionally if either side exceeds the cap.
if (width > kMaxResolution || height > kMaxResolution) {
CGFloat ratio = width/height;
if (ratio > 1) {
bounds.size.width = kMaxResolution;
bounds.size.height = bounds.size.width / ratio;
}
else {
bounds.size.height = kMaxResolution;
bounds.size.width = bounds.size.height * ratio;
}
}
// Uniform scale factor applied when drawing into the (possibly smaller) bounds.
CGFloat scaleRatio = bounds.size.width / width;
CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
CGFloat boundHeight;
UIImageOrientation orient = image.imageOrientation;
// Build the affine transform that maps the raw bitmap into upright position.
// For the Left/Right (and mirrored) cases the output bounds are also swapped,
// since a 90° rotation exchanges width and height.
switch(orient) {
case UIImageOrientationUp: //EXIF = 1
transform = CGAffineTransformIdentity;
break;
case UIImageOrientationUpMirrored: //EXIF = 2
transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
break;
case UIImageOrientationDown: //EXIF = 3
transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
transform = CGAffineTransformRotate(transform, M_PI);
break;
case UIImageOrientationDownMirrored: //EXIF = 4
transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
transform = CGAffineTransformScale(transform, 1.0, -1.0);
break;
case UIImageOrientationLeftMirrored: //EXIF = 5
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
transform = CGAffineTransformScale(transform, -1.0, 1.0);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationLeft: //EXIF = 6
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
break;
case UIImageOrientationRightMirrored: //EXIF = 7
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeScale(-1.0, 1.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
case UIImageOrientationRight: //EXIF = 8
boundHeight = bounds.size.height;
bounds.size.height = bounds.size.width;
bounds.size.width = boundHeight;
transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
transform = CGAffineTransformRotate(transform, M_PI / 2.0);
break;
default:
// Programmer error: UIImageOrientation has exactly the eight cases above.
[NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
}
// Draw into a new bitmap context. Pre-scale/translate the CTM so the
// orientation transform lands the image upright within the scaled bounds
// (UIKit contexts are top-left origin, CG draws bottom-left — hence the
// flips/translations before concatenating `transform`).
UIGraphicsBeginImageContext(bounds.size);
CGContextRef context = UIGraphicsGetCurrentContext();
if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
CGContextScaleCTM(context, -scaleRatio, scaleRatio);
CGContextTranslateCTM(context, -height, 0);
}
else {
CGContextScaleCTM(context, scaleRatio, -scaleRatio);
CGContextTranslateCTM(context, 0, -height);
}
CGContextConcatCTM(context, transform);
CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
// Capture the rendered, now-upright bitmap as a regular UIImage.
UIImage *imageCopy = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return imageCopy;
}
然后当我应用我的过滤器时,我这样做(特别感谢这个线程 - 特别是亚光和Wex)
// Applies a CIColorControls filter to `image` and renders the result through
// a CIContext so the returned UIImage is backed by a real CGImage bitmap
// (a bare imageWithCIImage: UIImage has no bitmap and misbehaves in views).
// Returns a new, bitmap-backed UIImage; `image` is not modified.
-(UIImage*)processImage:(UIImage*)image {
    CIImage *inImage = [CIImage imageWithCGImage:image.CGImage];
    CIFilter *filter = [CIFilter filterWithName:@"CIColorControls" keysAndValues:
                        kCIInputImageKey, inImage,
                        @"inputContrast", [NSNumber numberWithFloat:1.0],
                        nil];
    // Bug fix: -outputImage returns a CIImage, not a UIImage — the original
    // declared this as UIImage* and then sent it -extent (a CIImage message).
    CIImage *outImage = [filter outputImage];
    //Juicy bit: render the filter recipe into actual pixels.
    CGImageRef cgimageref = [[CIContext contextWithOptions:nil] createCGImage:outImage fromRect:[outImage extent]];
    UIImage *result = [UIImage imageWithCGImage:cgimageref];
    // createCGImage:fromRect: follows the Create rule: we own the CGImageRef
    // and must release it, or every call leaks a full-size bitmap (ARC does
    // not manage CF objects).
    CGImageRelease(cgimageref);
    return result;
}