在我的应用程序中,我使用相机和照片库来获取 UIImage,
然后将此 UIImage 缩小到其原始大小的约 1/20(即缩小约 20 倍),再根据这个 UIImage 创建一个 NSData 对象。
// Downscale the captured image to a 100x100-pixel thumbnail before encoding.
// NOTE(review): _takenImage presumably comes from the camera/photo picker —
// confirm it is non-nil at this point, otherwise _regularImage will be nil too.
_regularImage = [self resizeImage:_takenImage width:100 height:100];
/// Scales an image to the given pixel dimensions via a Core Graphics
/// bitmap context.
///
/// Fix: the original passed the source image's own color space and
/// CGImageAlphaInfo straight into CGBitmapContextCreate. That API only
/// supports a limited set of pixel-format combinations; camera and
/// photo-library images frequently carry formats it rejects (indexed or
/// grayscale color spaces, kCGImageAlphaLast/kCGImageAlphaFirst,
/// 16-bit components). When the combination is unsupported the context
/// is NULL, the draw silently does nothing, the returned UIImage is nil,
/// and UIImageJPEGRepresentation(nil, ...) then yields nil NSData.
/// Drawing into a known-good 8-bit RGBA device-RGB context avoids this.
///
/// @param anImage The source image; nil (or a nil-backed image) returns nil.
/// @param width   Target width in pixels; must be > 0.
/// @param height  Target height in pixels; must be > 0.
/// @return The resized image, or nil if a context could not be created.
- (UIImage *)resizeImage:(UIImage *)anImage width:(int)width height:(int)height
{
    CGImageRef imageRef = [anImage CGImage];
    if (imageRef == NULL || width <= 0 || height <= 0) {
        return nil;
    }

    // 8 bits per component, 4 bytes per pixel, premultiplied RGBA in a
    // device RGB color space — a combination CGBitmapContextCreate is
    // documented to support on iOS.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef bitmap = CGBitmapContextCreate(NULL,
                                                width,
                                                height,
                                                8,
                                                4 * width,
                                                colorSpace,
                                                kCGImageAlphaPremultipliedLast);
    CGColorSpaceRelease(colorSpace);
    if (bitmap == NULL) {
        // Unsupported parameter combination — bail out instead of
        // silently producing a nil image further down the pipeline.
        return nil;
    }

    CGContextSetInterpolationQuality(bitmap, kCGInterpolationHigh);
    CGContextDrawImage(bitmap, CGRectMake(0, 0, width, height), imageRef);

    CGImageRef scaledRef = CGBitmapContextCreateImage(bitmap);
    CGContextRelease(bitmap);
    if (scaledRef == NULL) {
        return nil;
    }

    // Preserve the source orientation so camera photos are not rotated
    // after resizing (CGImage drawing discards EXIF orientation).
    UIImage *result = [UIImage imageWithCGImage:scaledRef
                                          scale:1.0
                                    orientation:anImage.imageOrientation];
    CGImageRelease(scaledRef);
    return result;
}
// Encode the resized image as JPEG at maximum quality (1.0).
// NOTE(review): this returns nil whenever _regularImage is nil — i.e. when
// the resize step above failed — so check _regularImage before encoding.
NSData *image1Data = UIImageJPEGRepresentation(_regularImage, 1);
我实在想不出还有什么其他原因会导致这个问题。
谢谢
小瑞