/* 注意:我已经修复了代码 —— 请查找下面标有 "EDIT" 的编辑注释 */
对于 iOS 5.0+,为了在 iPad 上运行,我创建了一个函数来允许用户屏蔽输入图像,生成两个新图像,一个前景图像和一个背景图像。当我将这些添加到 UIImageView 并在设备或模拟器上显示时,我得到了我所期望的。
但是,当我通过将数据编码为会话数据来保存这些图像时,生成的图像是向后的(即图像遮罩已被反转)。我们两个人跑过代码,没有任何地方是颠倒的,没有复制/粘贴错误。我认为 kCGImageAlphaPremultipliedFirst 与 kCGImageAlphaPremultipliedLast 可能存在一些问题。当我对乱码图像进行编码时,它们从 kCGImageAlphaPremultipliedFirst 开始,当它们被加载时,它们是 kCGImageAlphaPremultipliedLast。
任何帮助或想法将不胜感激。
艾米@InsatiableGenius
下面的函数被调用:
[self createMask];
[self addImageAndBackground:foregroundImg backgroundImg:backgroundImg];
/// Applies `maskImage` (a grayscale mask) to `image` and returns the masked
/// result, re-rendered into a plain bitmap so that encoding/saving keeps the
/// correct alpha values instead of the live mask reference.
///
/// @param image      The source image to be masked.
/// @param maskImage  Grayscale mask; black areas are kept, white removed.
/// @return A new UIImage with the mask baked into its alpha channel.
///
/// FIX (review): the previous version declared `retImage` twice and called
/// CGImageRelease(masked) twice — a redefinition error plus an over-release.
/// Both duplicates are removed below; `masked` is released exactly once.
- (UIImage*)maskImage:(UIImage *)image withMask:(UIImage *)maskImage {
CGImageRef maskRef = maskImage.CGImage;
// Build a true image mask from the grayscale mask image's pixel data.
CGImageRef mask = CGImageMaskCreate(CGImageGetWidth(maskRef),
CGImageGetHeight(maskRef),
CGImageGetBitsPerComponent(maskRef),
CGImageGetBitsPerPixel(maskRef),
CGImageGetBytesPerRow(maskRef),
CGImageGetDataProvider(maskRef), NULL, false);
CGImageRef sourceImage = [image CGImage];
CGImageRef imageWithAlpha = sourceImage;
// CGImageCreateWithMask needs an alpha channel on the source; add one when
// the source has none (kCGImageAlphaNone / NoneSkipFirst / NoneSkipLast).
if ((CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNone)
|| (CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNoneSkipFirst)
|| (CGImageGetAlphaInfo(sourceImage) == kCGImageAlphaNoneSkipLast)) {
imageWithAlpha = CopyImageAndAddAlphaChannel(sourceImage);
}
CGImageRef masked = CGImageCreateWithMask(imageWithAlpha, mask);
CGImageRelease(mask);
// Only release the alpha copy if we actually created one above.
if (sourceImage != imageWithAlpha) {
CGImageRelease(imageWithAlpha);
}
UIImage* retImage = [UIImage imageWithCGImage:masked];
CGImageRelease(masked);
// EDIT: extra render step flattens the live mask into real alpha values so
// the image encodes/saves correctly (otherwise the saved mask is inverted).
// Use the WithOptions variant so the retina scale of retImage is preserved.
UIGraphicsBeginImageContextWithOptions(retImage.size, NO, retImage.scale);
[retImage drawAtPoint:CGPointZero];
UIImage *newImg = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
return newImg;
}
/// Builds the foreground and background images by masking the displayed
/// original with the user-painted mask captured from paintView.
/// Side effects: sets self.maskImg / self.backgroundImg / self.foregroundImg,
/// updates both image views, and temporarily flips paintView to a white
/// background for the capture (restored at the end).
-(void)createMask{
//take whole screen uiimage from paintview
//user painted black for mask, set rest of window to white
[paintView setWhiteBackground:YES];
//get user painted mask (blur disabled)
UIImage *maskFromPaint = [paintView allocNormalResImageWithBlur:NO/*blur?*/];
[self dumpTestImg:maskFromPaint name:@"maskFromPaint"];
// Force every pixel fully opaque so only luminance drives the mask.
UIImage *maskNoAlpha = [maskFromPaint resetImageAlpha:1.0];
[self dumpTestImg:maskNoAlpha name:@"maskFromPaintNoAlpha"];
//mask has to be gray — CGImageMaskCreate expects grayscale input
UIImage *maskFromPaintGray = [self convertImageToGrayScale:maskNoAlpha];
[self dumpTestImg:maskFromPaintGray name:@"maskFromPaintGray"];
//Had to call this normalize function because some pngs are not compatiable (8 bit)
UIImage *disp_original = [[UIImage alloc] initWithCGImage:[[original normalize] CGImage] ];
//Resize original to screen size (alternatively we could upscale the paint... not sure which for now)
disp_original = [disp_original resizedImageWithContentMode:UIViewContentModeScaleAspectFit bounds:inputImageView.frame.size interpolationQuality:kCGInterpolationHigh] ;
CGSize imageInViewSize = disp_original.size;
//use size of displayed original to crop the paintview
// Center the crop rect inside inputImageView (aspect-fit letterbox offsets).
CGRect overlayRect = CGRectMake((int)(inputImageView.frame.size.width - imageInViewSize.width) / 2,
(int)(inputImageView.frame.size.height - imageInViewSize.height) / 2,
(int)imageInViewSize.width,
(int)imageInViewSize.height);
//here is the actual crop
//get rectangle from paint that is the same size as the displayed original
CGImageRef maskFromPaintimageRef = CGImageCreateWithImageInRect([maskFromPaintGray CGImage], overlayRect);
UIImage *invertedMaskFromPaint = [UIImage imageWithCGImage:maskFromPaintimageRef];
// maskImg is the inverted crop; the non-inverted crop masks the foreground.
self.maskImg = [self invertImage:invertedMaskFromPaint];
[self dumpTestImg:self.maskImg name:@"maskFromPaintCropped"];
self.backgroundImg = [self maskImage:disp_original withMask:self.maskImg];
self.foregroundImg = [self maskImage:disp_original withMask:invertedMaskFromPaint];
foregroundImgView.image = foregroundImg;
backgroundImgView.image = backgroundImg;
foregroundImgView.hidden =NO;
backgroundImgView.hidden =NO;
[container bringSubviewToFront:foregroundImgView];
[container bringSubviewToFront:backgroundImgView];
[self dumpTestImg:foregroundImg name:@"foregroundImg"];
[self dumpTestImg:backgroundImg name:@"backgroundImg"];
//cleanup — maskFromPaintimageRef follows the Create rule, so release it here
CGImageRelease(maskFromPaintimageRef);
maskFromPaint = nil;
maskFromPaintGray = nil;
maskNoAlpha = nil;
disp_original = nil;
//put things back
[paintView setWhiteBackground:NO];
}
/// Redraws `sourceImage` into a fresh RGBA bitmap so the returned image is
/// guaranteed to carry an alpha channel (kCGImageAlphaPremultipliedLast).
/// Follows the Core Foundation Create/Copy rule: the caller owns the result
/// and must CGImageRelease it. Returns NULL when the context cannot be made.
CGImageRef CopyImageAndAddAlphaChannel(CGImageRef sourceImage) {
size_t pixelsWide = CGImageGetWidth(sourceImage);
size_t pixelsHigh = CGImageGetHeight(sourceImage);
CGColorSpaceRef rgbSpace = CGColorSpaceCreateDeviceRGB();
// bytesPerRow = 0 lets Core Graphics pick an optimal row stride.
CGContextRef bitmap = CGBitmapContextCreate(NULL, pixelsWide, pixelsHigh,
8, 0, rgbSpace, kCGImageAlphaPremultipliedLast);
CGImageRef copyWithAlpha = NULL;
if (bitmap != NULL) {
CGContextDrawImage(bitmap, CGRectMake(0, 0, pixelsWide, pixelsHigh), sourceImage);
copyWithAlpha = CGBitmapContextCreateImage(bitmap);
CGContextRelease(bitmap);
}
CGColorSpaceRelease(rgbSpace);
return copyWithAlpha;
}
/// Returns a color-inverted copy of `sourceImage` using the CIColorInvert
/// Core Image filter.
///
/// FIX (review): -[CIContext createCGImage:fromRect:] follows the Create
/// rule, so the returned CGImageRef must be released — the previous version
/// leaked one full-size bitmap per call (ARC does not manage CF types).
- (UIImage*)invertImage:(UIImage *)sourceImage {
CIContext *context = [CIContext contextWithOptions:nil];
CIFilter *filter = [CIFilter filterWithName:@"CIColorInvert"];
CIImage *inputImage = [[CIImage alloc] initWithImage:sourceImage];
[filter setValue:inputImage forKey:@"inputImage"];
CIImage *outputImage = filter.outputImage;
CGImageRef invertedRef = [context createCGImage:outputImage fromRect:outputImage.extent];
UIImage *result = [UIImage imageWithCGImage:invertedRef];
CGImageRelease(invertedRef);
return result;
}
/// Wraps the foreground and background images in interactive UIImageViews
/// and dumps both inputs for debugging.
///
/// FIX (review): removed the unused local `tmpIV`.
/// NOTE(review): the created image views are never added to a superview in
/// the code visible here — presumably that happens later (or is missing);
/// verify against the rest of the class.
-(void)addImageAndBackground:(UIImage *)foregroundImgIn backgroundImg:(UIImage *)backgroundImgIn{
UIImageView *imgVF = [[UIImageView alloc] initWithImage: foregroundImgIn];
imgVF.userInteractionEnabled = YES;
[self dumpTestImg:foregroundImgIn name:@"foregroundIn"];
UIImageView *imgVB = [[UIImageView alloc] initWithImage: backgroundImgIn];
imgVB.userInteractionEnabled = YES;
[self dumpTestImg:backgroundImgIn name:@"backgroundIn"];
}