/* NOTE: I have since fixed the code. Look for the EDIT marked in the code below. */
On the iPad (iOS 5.0+), I wrote functions that let the user mask an input image to generate two new images, a foreground and a background. When I add these to UIImageViews and display them on the device or simulator, the results are exactly as expected.
However, when I save them by encoding the data as session data, the resulting images come back reversed (that is, the image matte is inverted). Two of us have walked through the code; nowhere do the images get swapped, and there is no copy/paste error. I suspect it has something to do with kCGImageAlphaPremultipliedFirst versus kCGImageAlphaPremultipliedLast: when I encode the matted images they start out as kCGImageAlphaPremultipliedFirst, and when loaded back they report kCGImageAlphaPremultipliedLast.
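For what it's worth, a stripped-down round trip like the following reproduces the alpha-info change. This is just a minimal sketch: the LogAlphaRoundTrip helper is mine, and the PNG round trip is a stand-in, since our actual session-encoding code is not shown here.

#import <UIKit/UIKit.h>

// Round-trip an image through PNG data and log the alpha info on each
// side. (PNG here is a stand-in for our session encoding.)
static void LogAlphaRoundTrip(UIImage *image) {
    NSLog(@"before save: alphaInfo = %d", (int)CGImageGetAlphaInfo(image.CGImage));
    NSData *pngData = UIImagePNGRepresentation(image);
    UIImage *loaded = [UIImage imageWithData:pngData];
    NSLog(@"after load:  alphaInfo = %d", (int)CGImageGetAlphaInfo(loaded.CGImage));
}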
Any help or ideas would be greatly appreciated.
Amy @ InsatiableGenius
The functions below are called as follows:
[self createMask];
[self addImageAndBackground:foregroundImg backgroundImg:backgroundImg];
- (UIImage *)maskImage:(UIImage *)image withMask:(UIImage *)maskImage {
    CGImageRef maskRef = maskImage.CGImage;
    CGImageRef mask = CGImageMaskCreate(CGImageGetWidth(maskRef),
                                        CGImageGetHeight(maskRef),
                                        CGImageGetBitsPerComponent(maskRef),
                                        CGImageGetBitsPerPixel(maskRef),
                                        CGImageGetBytesPerRow(maskRef),
                                        CGImageGetDataProvider(maskRef), NULL, false);

    // The source image needs an alpha channel for the mask to apply, so
    // add one if it is missing.
    CGImageRef sourceImage = [image CGImage];
    CGImageRef imageWithAlpha = sourceImage;
    if ((CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNone)
        || (CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNoneSkipFirst)
        || (CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNoneSkipLast)) {
        imageWithAlpha = CopyImageAndAddAlphaChannel(sourceImage);
    }

    CGImageRef masked = CGImageCreateWithMask(imageWithAlpha, mask);
    CGImageRelease(mask);
    if (sourceImage != imageWithAlpha) {
        CGImageRelease(imageWithAlpha);
    }

    UIImage *retImage = [UIImage imageWithCGImage:masked];
    CGImageRelease(masked);

    /* EDIT STARTS HERE (was: return retImage;) */
    // Added an extra render step to force the saved image to carry real
    // alpha values instead of the mask.
    UIGraphicsBeginImageContext(retImage.size);
    [retImage drawAtPoint:CGPointZero];
    UIImage *newImg = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    retImage = nil;
    return newImg;
}
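As far as we can tell, the extra render pass works because the image returned by CGImageCreateWithMask still references the mask; drawing it into a plain bitmap context flattens that mask into ordinary alpha values, which is what survives encoding.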
- (void)createMask {
    // Take a whole-screen UIImage from the paint view. The user painted
    // black for the mask; set the rest of the window to white.
    [paintView setWhiteBackground:YES];

    // Get the user-painted mask.
    UIImage *maskFromPaint = [paintView allocNormalResImageWithBlur:NO /*blur?*/];
    [self dumpTestImg:maskFromPaint name:@"maskFromPaint"];

    UIImage *maskNoAlpha = [maskFromPaint resetImageAlpha:1.0];
    [self dumpTestImg:maskNoAlpha name:@"maskFromPaintNoAlpha"];

    // The mask has to be grayscale.
    UIImage *maskFromPaintGray = [self convertImageToGrayScale:maskNoAlpha];
    [self dumpTestImg:maskFromPaintGray name:@"maskFromPaintGray"];

    // Had to call this normalize function because some PNGs (8 bit) are
    // not compatible.
    UIImage *disp_original = [[UIImage alloc] initWithCGImage:[[original normalize] CGImage]];

    // Resize the original to screen size (alternatively we could upscale
    // the paint; not sure which is better for now).
    disp_original = [disp_original resizedImageWithContentMode:UIViewContentModeScaleAspectFit
                                                        bounds:inputImageView.frame.size
                                          interpolationQuality:kCGInterpolationHigh];
    CGSize imageInViewSize = disp_original.size;

    // Use the size of the displayed original to crop the paint view.
    CGRect overlayRect = CGRectMake((int)(inputImageView.frame.size.width - imageInViewSize.width) / 2,
                                    (int)(inputImageView.frame.size.height - imageInViewSize.height) / 2,
                                    (int)imageInViewSize.width,
                                    (int)imageInViewSize.height);

    // Here is the actual crop: get the rectangle from the paint that is
    // the same size as the displayed original.
    CGImageRef maskFromPaintimageRef = CGImageCreateWithImageInRect([maskFromPaintGray CGImage], overlayRect);
    UIImage *invertedMaskFromPaint = [UIImage imageWithCGImage:maskFromPaintimageRef];
    self.maskImg = [self invertImage:invertedMaskFromPaint];
    [self dumpTestImg:self.maskImg name:@"maskFromPaintCropped"];

    self.backgroundImg = [self maskImage:disp_original withMask:self.maskImg];
    self.foregroundImg = [self maskImage:disp_original withMask:invertedMaskFromPaint];

    foregroundImgView.image = foregroundImg;
    backgroundImgView.image = backgroundImg;
    foregroundImgView.hidden = NO;
    backgroundImgView.hidden = NO;
    [container bringSubviewToFront:foregroundImgView];
    [container bringSubviewToFront:backgroundImgView];

    [self dumpTestImg:foregroundImg name:@"foregroundImg"];
    [self dumpTestImg:backgroundImg name:@"backgroundImg"];

    // Cleanup.
    CGImageRelease(maskFromPaintimageRef);
    maskFromPaint = nil;
    maskFromPaintGray = nil;
    maskNoAlpha = nil;
    disp_original = nil;

    // Put things back.
    [paintView setWhiteBackground:NO];
}
CGImageRef CopyImageAndAddAlphaChannel(CGImageRef sourceImage) {
    CGImageRef retVal = NULL;
    size_t width = CGImageGetWidth(sourceImage);
    size_t height = CGImageGetHeight(sourceImage);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Redraw the source into an RGBA bitmap context so the copy carries
    // an alpha channel.
    CGContextRef offscreenContext = CGBitmapContextCreate(NULL, width, height,
                                                          8, 0, colorSpace,
                                                          kCGImageAlphaPremultipliedLast);
    if (offscreenContext != NULL) {
        CGContextDrawImage(offscreenContext, CGRectMake(0, 0, width, height), sourceImage);
        retVal = CGBitmapContextCreateImage(offscreenContext);
        CGContextRelease(offscreenContext);
    }
    CGColorSpaceRelease(colorSpace);
    return retVal;
}
- (UIImage *)invertImage:(UIImage *)sourceImage {
    CIContext *context = [CIContext contextWithOptions:nil];
    CIFilter *filter = [CIFilter filterWithName:@"CIColorInvert"];
    CIImage *inputImage = [[CIImage alloc] initWithImage:sourceImage];
    [filter setValue:inputImage forKey:@"inputImage"];

    // createCGImage: follows the Create rule, so release the CGImageRef
    // after wrapping it in a UIImage to avoid a leak.
    CGImageRef cgImage = [context createCGImage:filter.outputImage fromRect:filter.outputImage.extent];
    UIImage *result = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return result;
}
- (void)addImageAndBackground:(UIImage *)foregroundImgIn backgroundImg:(UIImage *)backgroundImgIn {
    UIImageView *imgVF = [[UIImageView alloc] initWithImage:foregroundImgIn];
    imgVF.userInteractionEnabled = YES;
    [self dumpTestImg:foregroundImgIn name:@"foregroundIn"];

    UIImageView *imgVB = [[UIImageView alloc] initWithImage:backgroundImgIn];
    imgVB.userInteractionEnabled = YES;
    [self dumpTestImg:backgroundImgIn name:@"backgroundIn"];
}