UITableViewController クラスの 1 つで使用するカスタムの UITableViewCell クラスがあります。そのコードを以下に貼り付けました。少し長い場合は申し訳ありませんが、すべてのコードが質問に関連していると思います。カスタム セル クラスでは、画像を取得し、UIBezierPath でマスクして円にし、境界線と内側の影を追加する別の画像を作成し (これも UIBezierPath を使用)、2 つの画像を結合します。
最初の画像 (下にある実際の画像) が 2 番目の画像 (上にある内側の影) の周りで 1 ピクセルだけオーバーフローすることを除いて、すべて正常に機能します。CGRect 構造体の値をいじろうとしましたが、うまくいきませんでした...これは私が言っていることを示す画像です:
カスタムの UITableViewCell のコードは次のとおりです。
#import "AZTrackedUserMenuCell.h"
#import "UIImageView+AFNetworking.h"
// Private class extension: state the cell does not expose in its header.
@interface AZTrackedUserMenuCell ()
// Spinner shown over the avatar area while the profile image is loading;
// stopped/hidden once an image (or the default image) is displayed.
@property (nonatomic, strong) UIActivityIndicatorView *ai;
@end
@implementation AZTrackedUserMenuCell
// Lays out and configures the cell's avatar spinner, avatar image and
// username label from self.user.
//
// NOTE(review): layoutSubviews is called many times during a cell's
// lifetime. The original code allocated and added a fresh
// UIActivityIndicatorView on every pass, stacking spinner subviews;
// creation is now guarded the same way the tag-2 label already was.
- (void)layoutSubviews
{
    [super layoutSubviews];

    // Create the activity indicator exactly once.
    if (self.ai == nil) {
        self.ai = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];
        // Keep the indicator's intrinsic size; only reposition it over the avatar.
        CGFloat aiWidth = self.ai.frame.size.width;
        CGFloat aiHeight = self.ai.frame.size.height;
        self.ai.frame = CGRectMake(42.0, 38.0, aiWidth, aiHeight);
        [self addSubview:self.ai];
        [self.ai startAnimating];
    }

    [self displayImage];

    // Reuse the username label across layout passes via its tag.
    UILabel *usernameLabel = (UILabel *)[self viewWithTag:2];
    if (usernameLabel == nil) {
        usernameLabel = [[UILabel alloc] initWithFrame:CGRectMake(95.0, 15.0, 140.0, 21.0)];
        usernameLabel.tag = 2;
        [self addSubview:usernameLabel];
    }
    usernameLabel.font = [UIFont boldSystemFontOfSize:14.0];
    usernameLabel.textColor = [UIColor colorWithWhite:0.9 alpha:1.000];
    usernameLabel.backgroundColor = [UIColor colorWithWhite:0.0 alpha:0.0];

    // The cell's own tag doubles as a "has a tracked user" flag (-1 = none).
    if (self.user == nil) {
        self.tag = -1;
        usernameLabel.text = @"No user tracked";
    } else {
        self.tag = 0;
        usernameLabel.text = self.user[@"display_name"];
    }
    self.backgroundColor = [UIColor clearColor];
}
// Shows the tracked user's avatar: the bundled default image when no user
// is set, otherwise an asynchronously downloaded profile image
// (AFNetworking's UIImageView+AFNetworking category). Either way the image
// is post-processed through renderImage:inImageView:.
//
// NOTE(review): the original created and added a new UIImageView on every
// call (displayImage runs from layoutSubviews, so views stacked up), the
// success block captured self strongly, and the empty failure block left
// the spinner animating forever when the download failed.
- (void)displayImage
{
    // Reuse a single avatar image view, keyed by tag 3 (tag 2 is the
    // username label; the cell's own tag is only ever -1 or 0).
    UIImageView *avatarImageView = (UIImageView *)[self viewWithTag:3];
    if (avatarImageView == nil) {
        avatarImageView = [[UIImageView alloc] initWithFrame:CGRectMake(15.0, 10.0, 84.0, 84.0)];
        avatarImageView.tag = 3;
        [self addSubview:avatarImageView];
    }

    if (self.user == nil) {
        avatarImageView.image = [self renderImage:[UIImage imageNamed:@"defaultUserImage"] inImageView:avatarImageView];
        [self.ai stopAnimating];
        self.ai.hidden = YES;
    } else {
        NSURLRequest *avatarURLRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:self.user[@"profile_image"]]];
        // Weak references: the async completion must not keep a reused
        // table-view cell (or its subviews) alive.
        __weak typeof(self) weakSelf = self;
        __weak typeof(avatarImageView) weakAvatarImageView = avatarImageView;
        [avatarImageView setImageWithURLRequest:avatarURLRequest
                               placeholderImage:[UIImage imageNamed:@"no-image"]
                                        success:^(NSURLRequest *request, NSHTTPURLResponse *response, UIImage *image) {
                                            __strong typeof(weakSelf) strongSelf = weakSelf;
                                            if (strongSelf == nil || weakAvatarImageView == nil) {
                                                return; // cell already deallocated/reused
                                            }
                                            weakAvatarImageView.image = [strongSelf renderImage:image inImageView:weakAvatarImageView];
                                            [strongSelf.ai stopAnimating];
                                        } failure:^(NSURLRequest *request, NSHTTPURLResponse *response, NSError *error) {
                                            // Keep the placeholder, but don't spin forever.
                                            [weakSelf.ai stopAnimating];
                                        }];
    }
}
// Renders `image` as a circular avatar with a border and an inner shadow,
// assigns the composite to `imageView.image`, and returns it.
//
// Three offscreen passes, each in its own bitmap context sized to
// imageView.bounds.size (84x84 here), while the oval itself is 76x76 at
// origin (0,0):
//   1. maskedImage - the source image clipped to the oval
//   2. ss          - the oval's stroke plus inner shadow on transparency
//   3. the final composite of ss over maskedImage
//
// NOTE(review): the 1px halo of the base image visible around the shadow
// layer is presumably antialiasing at the clip edge of pass 1 peeking out
// beyond the stroked oval of pass 2 - confirm by insetting the mask oval
// slightly, or by drawing all passes at identical rects in one context.
- (UIImage *)renderImage:(UIImage *)image inImageView:(UIImageView *)imageView
{
UIImage *maskedImage = nil;
// --- Pass 1: oval-clipped copy of the source image ---------------------
// Opaque=NO, scale=0.0 -> transparent backing at the device's scale.
UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
CGContextRef maskContext = UIGraphicsGetCurrentContext();
UIBezierPath* maskOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
CGContextSaveGState(maskContext);
{
[maskOvalPath addClip];
[image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];
maskedImage = UIGraphicsGetImageFromCurrentImageContext();
}
CGContextRestoreGState(maskContext);
UIGraphicsEndImageContext();
// --- Pass 2: border stroke + inner shadow ------------------------------
UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
CGContextRef ssContext = UIGraphicsGetCurrentContext();
// Fully transparent fill: only the shadow/stroke should contribute pixels.
UIColor* fillColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0];
UIColor* strokeColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0.529];
UIColor* strokeColor2 = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 1];
UIColor* shadow = strokeColor;
CGSize shadowOffset = CGSizeMake(0.1, -0.1);
CGFloat shadowBlurRadius = 25.0;
// Stroke and shadow oval path
UIBezierPath* ssOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
[fillColor setFill];
[ssOvalPath fill];
// Inner-shadow technique (PaintCode-style): build a rectangle big enough
// to cover the oval plus the full shadow spread...
CGRect ovalBorderRect = CGRectInset(ssOvalPath.bounds, -shadowBlurRadius, -shadowBlurRadius);
ovalBorderRect = CGRectOffset(ovalBorderRect, -shadowOffset.width, -shadowOffset.height);
ovalBorderRect = CGRectInset(CGRectUnion(ovalBorderRect, ssOvalPath.bounds), -1, -1);
// ...then punch the oval out of it with the even-odd rule, so filling the
// "negative" shape casts a shadow inward across the clipped oval.
UIBezierPath* ovalNegativePath = [UIBezierPath bezierPathWithRect: ovalBorderRect];
[ovalNegativePath appendPath:ssOvalPath];
ovalNegativePath.usesEvenOddFillRule = YES;
// Stroke and shadow image
UIImage *ss = nil;
CGContextSaveGState(ssContext);
{
// Draw the negative shape far off to the left and shift the shadow back
// by the same distance, so only the shadow (not the gray fill) lands in
// the visible, oval-clipped area.
CGFloat xOffset = shadowOffset.width + round(ovalBorderRect.size.width);
CGFloat yOffset = shadowOffset.height;
CGContextSetShadowWithColor(ssContext,
CGSizeMake(xOffset + copysign(0.1, xOffset), yOffset + copysign(0.1, yOffset)),
shadowBlurRadius,
shadow.CGColor);
[ssOvalPath addClip];
CGAffineTransform transform = CGAffineTransformMakeTranslation(-round(ovalBorderRect.size.width), 0);
[ovalNegativePath applyTransform: transform];
[[UIColor grayColor] setFill];
[ovalNegativePath fill];
// 3pt border stroke on top of the shadow, still clipped to the oval.
[strokeColor2 setStroke];
ssOvalPath.lineWidth = 3;
[ssOvalPath stroke];
ss = UIGraphicsGetImageFromCurrentImageContext();
}
CGContextRestoreGState(ssContext);
UIGraphicsEndImageContext();
// --- Pass 3: composite shadow/stroke over the masked image -------------
UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
CGContextRef context = UIGraphicsGetCurrentContext();
CGContextSaveGState(context);
{
[maskedImage drawInRect:CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height)];
// Both images are context-sized (84x84 per the author's logging), so this
// offset works out to (0,0): ss lands exactly on top of maskedImage.
[ss drawInRect:CGRectMake(maskedImage.size.width - ss.size.width, maskedImage.size.height - ss.size.height, ss.size.width, ss.size.height)];
imageView.image = UIGraphicsGetImageFromCurrentImageContext();
}
CGContextRestoreGState(context);
UIGraphicsEndImageContext();
return imageView.image;
}
@end
このオーバーフローを削除するにはどうすればよいですか?
編集 1 : 3 つの画像すべてのサイズを記録しました。
DDLogInfo(@"%f, %f", maskedImage.size.width, maskedImage.size.height);
DDLogInfo(@"%f, %f", ss.size.width, ss.size.height);
DDLogInfo(@"%f, %f", imageView.image.size.width, imageView.image.size.height);
そして、次の出力を得ました。
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
編集 2: @peter-hosey からのアドバイスに従って、renderImage:inImageView:
メソッドをリファクタリングしました。コードがより良いかどうかはわかりませんが、ここにあります:(結果は同じであることに注意してください)
// Refactored variant (question's "Edit 2"): the same circular-avatar
// rendering collapsed into a single bitmap context instead of three.
// The stroke/shadow layer is drawn first and snapshotted (ss), then the
// oval-clipped source image is drawn into the SAME already-painted
// context, snapshotted again (maskedImage), and both snapshots are
// re-drawn on top of the live context.
//
// NOTE(review): repeatedly snapshotting a context and drawing the
// snapshots back into that same context re-rasterizes content; that is
// presumably the "quality loss rather than overflow" the author reports -
// confirm by drawing each layer only once per context as in the original.
- (UIImage *)renderImage:(UIImage *)image inImageView:(UIImageView *)imageView
{
// Single transparent context at device scale, sized to the image view.
UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
CGContextRef maskContext = UIGraphicsGetCurrentContext();
// Fully transparent fill: only the shadow/stroke should contribute pixels.
UIColor* fillColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0];
UIColor* strokeColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0.529];
UIColor* strokeColor2 = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 1];
UIColor* shadow = strokeColor;
CGSize shadowOffset = CGSizeMake(0.1, -0.1);
CGFloat shadowBlurRadius = 25.0;
UIBezierPath* ssOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
[fillColor setFill];
[ssOvalPath fill];
// Inner-shadow technique: a rect covering the oval plus shadow spread,
// with the oval punched out via the even-odd rule ("negative" shape).
CGRect ovalBorderRect = CGRectInset(ssOvalPath.bounds, -shadowBlurRadius, -shadowBlurRadius);
ovalBorderRect = CGRectOffset(ovalBorderRect, -shadowOffset.width, -shadowOffset.height);
ovalBorderRect = CGRectInset(CGRectUnion(ovalBorderRect, ssOvalPath.bounds), -1, -1);
UIBezierPath* ovalNegativePath = [UIBezierPath bezierPathWithRect: ovalBorderRect];
[ovalNegativePath appendPath:ssOvalPath];
ovalNegativePath.usesEvenOddFillRule = YES;
UIImage *ss = nil;
UIBezierPath* maskOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
CGContextSaveGState(maskContext);
{
// Offset the negative shape off-canvas and shift the shadow back by the
// same amount, so only the inward shadow appears in the clipped oval.
CGFloat xOffset = shadowOffset.width + round(ovalBorderRect.size.width);
CGFloat yOffset = shadowOffset.height;
CGContextSetShadowWithColor(maskContext,
CGSizeMake(xOffset + copysign(0.1, xOffset), yOffset + copysign(0.1, yOffset)),
shadowBlurRadius,
shadow.CGColor);
[ssOvalPath addClip];
CGAffineTransform transform = CGAffineTransformMakeTranslation(-round(ovalBorderRect.size.width), 0);
[ovalNegativePath applyTransform: transform];
[[UIColor grayColor] setFill];
[ovalNegativePath fill];
// 3pt border stroke, still clipped to the oval.
[strokeColor2 setStroke];
ssOvalPath.lineWidth = 3;
[ssOvalPath stroke];
// Snapshot 1: the shadow + stroke layer as rendered so far.
ss = UIGraphicsGetImageFromCurrentImageContext();
// Draw the source image oval-clipped into the same (already painted)
// context, then snapshot again - this capture includes the ss content.
[maskOvalPath addClip];
[image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];
UIImage *maskedImage = UIGraphicsGetImageFromCurrentImageContext();
// Re-draw both snapshots over the live context and capture the final
// composite (note: still inside the oval clip set above).
[maskedImage drawInRect:CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height)];
[ss drawInRect:CGRectMake(maskedImage.size.width - ss.size.width, maskedImage.size.height - ss.size.height, ss.size.width, ss.size.height)];
imageView.image = UIGraphicsGetImageFromCurrentImageContext();
}
CGContextRestoreGState(maskContext);
UIGraphicsEndImageContext();
return imageView.image;
}
オーバーフローというより画質が落ちているようですが、よくわかりません..