Combining two UIImages: bottom UIImage overflows

Asked: 2013-07-01 12:12:00

Tags: cocoa-touch uiimage uikit core-graphics image-manipulation

I have a custom UITableViewCell class that I use in one of my UITableViewController classes. I've pasted the code below; sorry if it's a bit lengthy, but I believe every part of it is relevant to the problem. In the custom cell class I take an image, mask it with a UIBezierPath to make it circular, create another image that adds a border and an inner shadow (also using a UIBezierPath), and then merge the two images.
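
On its own, the masking step boils down to something like this (a stripped-down sketch of what the full renderImage:inImageView: method below does, using the same 76.0-point oval):

// Simplified sketch of the circular-mask step: clip the context to an oval path,
// draw the source image into it, then grab the result from the context.
UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
UIBezierPath *ovalPath = [UIBezierPath bezierPathWithOvalInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];
[ovalPath addClip];
[image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];
UIImage *roundImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();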

Everything works, except that the first image (the actual picture, which sits at the bottom) overflows by about one pixel around the second image (the inner shadow, which sits on top). I tried fiddling with the values of the CGRect structs, but that didn't do it. Here is an image illustrating what I mean:

[Image: Picture overflows]
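
The kind of fiddling I tried looked like this (the numbers are only examples of tweaks I attempted, none of which removed the fringe):

// Hypothetical tweak: draw the bottom (masked) image into a slightly smaller rect
// so it stays inside the stroked oval. This did not get rid of the fringe.
[maskedImage drawInRect:CGRectInset(CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height), 1.0, 1.0)];
[ss drawInRect:CGRectMake(0.0, 0.0, ss.size.width, ss.size.height)];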

Here is the code for the custom UITableViewCell:

#import "AZTrackedUserMenuCell.h"
#import "UIImageView+AFNetworking.h"

@interface AZTrackedUserMenuCell ()

@property (nonatomic, strong) UIActivityIndicatorView *ai;

@end

@implementation AZTrackedUserMenuCell

- (void)layoutSubviews
{
    [super layoutSubviews];

    self.ai = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];

    CGFloat aiWidth = self.ai.frame.size.width;
    CGFloat aiHeight = self.ai.frame.size.height;

    self.ai.frame = CGRectMake(42.0, 38.0, aiWidth, aiHeight);

    [self addSubview:self.ai];

    [self.ai startAnimating];

    [self displayImage];

    UILabel *usernameLabel = (UILabel *)[self viewWithTag:2];
    if (usernameLabel == nil) {
        usernameLabel = [[UILabel alloc] initWithFrame:CGRectMake(95.0, 15.0, 140.0, 21)];
        usernameLabel.tag = 2;
        [self addSubview:usernameLabel];
    }

    usernameLabel.font = [UIFont boldSystemFontOfSize:14.0];
    usernameLabel.textColor = [UIColor colorWithWhite:0.9 alpha:1.000];
    usernameLabel.backgroundColor = [UIColor colorWithWhite:0.0 alpha:0.0];

    if (self.user == nil) {
        self.tag = -1;
        usernameLabel.text = @"No user tracked";
    } else {
        self.tag = 0;
        usernameLabel.text = self.user[@"display_name"];
    }


    self.backgroundColor = [UIColor clearColor];
}

- (void)displayImage
{
    UIImageView *avatarImageView = [[UIImageView alloc] initWithFrame:CGRectMake(15.0, 10.0, 84.0, 84.0)];

    if (self.user == nil) {
        avatarImageView.image = [self renderImage:[UIImage imageNamed:@"defaultUserImage"] inImageView:avatarImageView];
        [self.ai stopAnimating];
        self.ai.hidden = YES;
        [self addSubview:avatarImageView];
    } else {
        NSURLRequest *avatarURLRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:self.user[@"profile_image"]]];
        __weak typeof(avatarImageView) weakAvatarImageView = avatarImageView;
        [avatarImageView setImageWithURLRequest:avatarURLRequest
                               placeholderImage:[UIImage imageNamed:@"no-image"]
                                        success:^(NSURLRequest *request, NSHTTPURLResponse *response, UIImage *image) {
                                            weakAvatarImageView.image = [self renderImage:image inImageView:weakAvatarImageView];
                                            [self addSubview:weakAvatarImageView];
                                            [self.ai stopAnimating];
                                        } failure:^(NSURLRequest *request, NSHTTPURLResponse *response, NSError *error) {

                                        }];
    }

}

- (UIImage *)renderImage:(UIImage *)image inImageView:(UIImageView *)imageView
{
    UIImage *maskedImage = nil;

    // First pass: mask the source image to a circle with an oval clip path.
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
    CGContextRef maskContext = UIGraphicsGetCurrentContext();

    UIBezierPath* maskOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    CGContextSaveGState(maskContext);
    {
        [maskOvalPath addClip];

        [image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];

        maskedImage = UIGraphicsGetImageFromCurrentImageContext();
    }

    CGContextRestoreGState(maskContext);

    UIGraphicsEndImageContext();

    // Second pass: render the border and inner shadow into a separate image.
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);

    CGContextRef ssContext = UIGraphicsGetCurrentContext();

    UIColor* fillColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0];
    UIColor* strokeColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0.529];
    UIColor* strokeColor2 = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 1];

    UIColor* shadow = strokeColor;
    CGSize shadowOffset = CGSizeMake(0.1, -0.1);
    CGFloat shadowBlurRadius = 25.0;

    // Stroke and shadow oval path
    UIBezierPath* ssOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    [fillColor setFill];
    [ssOvalPath fill];

    CGRect ovalBorderRect = CGRectInset(ssOvalPath.bounds, -shadowBlurRadius, -shadowBlurRadius);
    ovalBorderRect = CGRectOffset(ovalBorderRect, -shadowOffset.width, -shadowOffset.height);
    ovalBorderRect = CGRectInset(CGRectUnion(ovalBorderRect, ssOvalPath.bounds), -1, -1);

    UIBezierPath* ovalNegativePath = [UIBezierPath bezierPathWithRect: ovalBorderRect];
    [ovalNegativePath appendPath:ssOvalPath];
    ovalNegativePath.usesEvenOddFillRule = YES;

    // Stroke and shadow image
    UIImage *ss = nil;

    CGContextSaveGState(ssContext);
    {
        CGFloat xOffset = shadowOffset.width + round(ovalBorderRect.size.width);
        CGFloat yOffset = shadowOffset.height;
        CGContextSetShadowWithColor(ssContext,
                                    CGSizeMake(xOffset + copysign(0.1, xOffset), yOffset + copysign(0.1, yOffset)),
                                    shadowBlurRadius,
                                    shadow.CGColor);

        [ssOvalPath addClip];
        CGAffineTransform transform = CGAffineTransformMakeTranslation(-round(ovalBorderRect.size.width), 0);
        [ovalNegativePath applyTransform: transform];
        [[UIColor grayColor] setFill];
        [ovalNegativePath fill];

        [strokeColor2 setStroke];
        ssOvalPath.lineWidth = 3;
        [ssOvalPath stroke];

        ss = UIGraphicsGetImageFromCurrentImageContext();
    }
    CGContextRestoreGState(ssContext);

    UIGraphicsEndImageContext();

    // Third pass: composite the masked image (bottom) with the border/shadow image (top).
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);

    CGContextRef context = UIGraphicsGetCurrentContext();

    CGContextSaveGState(context);
    {
        [maskedImage drawInRect:CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height)];
        [ss drawInRect:CGRectMake(maskedImage.size.width - ss.size.width, maskedImage.size.height - ss.size.height, ss.size.width, ss.size.height)];

        imageView.image = UIGraphicsGetImageFromCurrentImageContext();
    }
    CGContextRestoreGState(context);

    UIGraphicsEndImageContext();

    return imageView.image;
}

@end

How can I get rid of this overflow?

Edit 1: I logged the sizes of all three images:

DDLogInfo(@"%f, %f", maskedImage.size.width, maskedImage.size.height);
DDLogInfo(@"%f, %f", ss.size.width, ss.size.height);
DDLogInfo(@"%f, %f", imageView.image.size.width, imageView.image.size.height);

I got the following output:

2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000

Edit 2: Following @peter-hosey's suggestion, I refactored the renderImage:inImageView: method. I'm not sure the code is any better, but here it is (note that the result is the same):

- (UIImage *)renderImage:(UIImage *)image inImageView:(UIImageView *)imageView
{
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
    CGContextRef maskContext = UIGraphicsGetCurrentContext();

    UIColor* fillColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0];
    UIColor* strokeColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0.529];
    UIColor* strokeColor2 = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 1];

    UIColor* shadow = strokeColor;
    CGSize shadowOffset = CGSizeMake(0.1, -0.1);
    CGFloat shadowBlurRadius = 25.0;

    UIBezierPath* ssOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    [fillColor setFill];
    [ssOvalPath fill];

    CGRect ovalBorderRect = CGRectInset(ssOvalPath.bounds, -shadowBlurRadius, -shadowBlurRadius);
    ovalBorderRect = CGRectOffset(ovalBorderRect, -shadowOffset.width, -shadowOffset.height);
    ovalBorderRect = CGRectInset(CGRectUnion(ovalBorderRect, ssOvalPath.bounds), -1, -1);

    UIBezierPath* ovalNegativePath = [UIBezierPath bezierPathWithRect: ovalBorderRect];
    [ovalNegativePath appendPath:ssOvalPath];
    ovalNegativePath.usesEvenOddFillRule = YES;

    UIImage *ss = nil;

    UIBezierPath* maskOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    CGContextSaveGState(maskContext);
    {
        CGFloat xOffset = shadowOffset.width + round(ovalBorderRect.size.width);
        CGFloat yOffset = shadowOffset.height;
        CGContextSetShadowWithColor(maskContext,
                                    CGSizeMake(xOffset + copysign(0.1, xOffset), yOffset + copysign(0.1, yOffset)),
                                    shadowBlurRadius,
                                    shadow.CGColor);

        [ssOvalPath addClip];
        CGAffineTransform transform = CGAffineTransformMakeTranslation(-round(ovalBorderRect.size.width), 0);
        [ovalNegativePath applyTransform: transform];
        [[UIColor grayColor] setFill];
        [ovalNegativePath fill];

        [strokeColor2 setStroke];
        ssOvalPath.lineWidth = 3;
        [ssOvalPath stroke];

        ss = UIGraphicsGetImageFromCurrentImageContext();

        [maskOvalPath addClip];
        [image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];

        UIImage *maskedImage = UIGraphicsGetImageFromCurrentImageContext();

        [maskedImage drawInRect:CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height)];
        [ss drawInRect:CGRectMake(maskedImage.size.width - ss.size.width, maskedImage.size.height - ss.size.height, ss.size.width, ss.size.height)];

        imageView.image = UIGraphicsGetImageFromCurrentImageContext();
    }

    CGContextRestoreGState(maskContext);

    UIGraphicsEndImageContext();

    return imageView.image;
}

It no longer seems to be an overflow; instead, the resulting image quality seems to have degraded, but I'm not sure.

0 Answers:

No answers