
How do I create a CGImageSourceRef from raw data? I have a file that contains nothing but the image's pixel data. I know the resolution, depth, and so on (e.g. 640x860, RGB, 8-bit, orientation = 1, DPI = 300), but this information is not stored in the file. As I said, the file holds only the raw pixel data.

Here is what I have tried so far:

NSString *path = @"/Users/.../Desktop/image";
NSData *data = [NSData dataWithContentsOfFile: path];
CFDataRef cfdata = CFDataCreate(NULL, [data bytes], [data length]);
CFDictionaryRef options;
CGImageSourceRef imageSource = CGImageSourceCreateWithData(cfdata, nil);

The image is not created correctly because its dimensions are undefined. I don't know how to supply the image information (resolution and so on) to this CGImageSourceRef. I think I have to initialize the CFDictionaryRef options and pass it in:

CGImageSourceRef imageSource = CGImageSourceCreateWithData(cfdata, options);

How do I create a CFDictionaryRef that can be used with CGImageSourceCreateWithData?
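
I assume it would be built with something like the following, but none of the documented keys (e.g. kCGImageSourceTypeIdentifierHint, kCGImageSourceShouldCache) seem to describe width, height or bit depth:

NSDictionary *optionsDict = @{ (__bridge NSString *)kCGImageSourceTypeIdentifierHint : @"public.png", // just a guessed type hint
                               (__bridge NSString *)kCGImageSourceShouldCache : @YES };
CGImageSourceRef imageSource = CGImageSourceCreateWithData(cfdata, (__bridge CFDictionaryRef)optionsDict);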


2 Answers


You don't want CGImageSource here. It isn't for raw pixel data; it's for parsing standard image file formats (PNG, GIF, JPEG, etc.). You should create the CGImage directly with CGImageCreate():

NSString *path = @"/Users/.../Desktop/image";
NSData *data = [NSData dataWithContentsOfFile: path];
CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
CGColorSpaceRef colorspace = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
CGImageRef image = CGImageCreate(640, // width
                                 860, // height
                                 8, // bitsPerComponent
                                 32, // bitsPerPixel
                                 4 * 640, // bytesPerRow
                                 colorspace,
                                 kCGImageAlphaNoneSkipFirst, // bitmapInfo
                                 provider,
                                 NULL, // decode
                                 true, // shouldInterpolate
                                 kCGRenderingIntentDefault // intent
                                 );
CGColorSpaceRelease(colorspace);
CGDataProviderRelease(provider);

Some of the values above (bitsPerComponent, bitsPerPixel, bytesPerRow, bitmapInfo) are guesses based on your brief description of the pixel data. If they don't match your data, adjust them.
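
For example, if the file turns out to be tightly packed 24-bit RGB (3 bytes per pixel, no alpha, no row padding) — again, just a guess from your description — the CGImageCreate() call above would change to something like:

CGImageRef image = CGImageCreate(640,     // width
                                 860,     // height
                                 8,       // bitsPerComponent
                                 24,      // bitsPerPixel: 3 components x 8 bits, no alpha byte
                                 3 * 640, // bytesPerRow: tightly packed, no padding
                                 colorspace,
                                 kCGImageAlphaNone, // bitmapInfo: no alpha channel at all
                                 provider,
                                 NULL,    // decode
                                 true,    // shouldInterpolate
                                 kCGRenderingIntentDefault // intent
                                 );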

You could also create the data provider directly from the file with CGDataProviderCreateWithURL() or CGDataProviderCreateWithFilename(), but I chose to illustrate the more general approach of creating it from raw data that could have come from anywhere.
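
If you prefer that route, a minimal sketch (reusing the path variable from above; everything after the provider stays the same):

CGDataProviderRef provider = CGDataProviderCreateWithFilename([path fileSystemRepresentation]);
// or, equivalently, from a file URL:
// CGDataProviderRef provider = CGDataProviderCreateWithURL((__bridge CFURLRef)[NSURL fileURLWithPath:path]);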

Answered 2015-01-08T16:39:31.777

I'm not sure whether this code will help you, but this is how I create images downloaded from the network. I originally found this code in one of the AFNetworking classes.

- (UIImage *)imageFromResponse:(NSHTTPURLResponse *)response data:(NSData *)data scale:(CGFloat)scale {

    if (!data || [data length] == 0) {
        return nil;
    }

    CGImageRef imageRef = NULL;
    CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    if ([response.MIMEType isEqualToString:@"image/png"]) {
        imageRef = CGImageCreateWithPNGDataProvider(dataProvider,  NULL, true, kCGRenderingIntentDefault);
    } else if ([response.MIMEType isEqualToString:@"image/jpeg"]) {
        imageRef = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, true, kCGRenderingIntentDefault);

        // CGImageCreateWithJPEGDataProvider does not properly handle CMYK, so if the image is CMYK, fall back to AFImageWithDataAtScale
        if (imageRef) {
            CGColorSpaceRef imageColorSpace = CGImageGetColorSpace(imageRef);
            CGColorSpaceModel imageColorSpaceModel = CGColorSpaceGetModel(imageColorSpace);
            if (imageColorSpaceModel == kCGColorSpaceModelCMYK) {
                CGImageRelease(imageRef);
                imageRef = NULL;
            }
        }
    }

    CGDataProviderRelease(dataProvider);

    UIImage *anImage = [[UIImage alloc] initWithData:data];
    UIImage *image = [[UIImage alloc] initWithCGImage:[anImage CGImage] scale:scale orientation:anImage.imageOrientation];

    if (!imageRef) {
        if (image.images || !image) {
            return image;
        }

        imageRef = CGImageCreateCopy([image CGImage]);
        if (!imageRef) {
            return nil;
        }
    }

    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);

    if (width * height > 1024 * 1024 || bitsPerComponent > 8) {
        CGImageRelease(imageRef);

        return image;
    }

    size_t bytesPerRow = 0; // CGImageGetBytesPerRow() calculates incorrectly in iOS 5.0, so defer to CGBitmapContextCreate
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGColorSpaceModel colorSpaceModel = CGColorSpaceGetModel(colorSpace);
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);

    if (colorSpaceModel == kCGColorSpaceModelRGB) {
        uint32_t alpha = (bitmapInfo & kCGBitmapAlphaInfoMask);
        if (alpha == kCGImageAlphaNone) {
            bitmapInfo &= ~kCGBitmapAlphaInfoMask;
            bitmapInfo |= kCGImageAlphaNoneSkipFirst;
        } else if (!(alpha == kCGImageAlphaNoneSkipFirst || alpha == kCGImageAlphaNoneSkipLast)) {
            bitmapInfo &= ~kCGBitmapAlphaInfoMask;
            bitmapInfo |= kCGImageAlphaPremultipliedFirst;
        }
    }

    CGContextRef context = CGBitmapContextCreate(NULL, width, height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo);

    CGColorSpaceRelease(colorSpace);

    if (!context) {
        CGImageRelease(imageRef);
        return image;
    }

    CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, width, height), imageRef);
    CGImageRef inflatedImageRef = CGBitmapContextCreateImage(context);

    CGContextRelease(context);

    UIImage *inflatedImage = [[UIImage alloc] initWithCGImage:inflatedImageRef scale:scale orientation:image.imageOrientation];

    CGImageRelease(inflatedImageRef);
    CGImageRelease(imageRef);

    return inflatedImage;
}
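
A hypothetical call site, assuming you have the NSHTTPURLResponse and NSData from your download (the imageView property is just an example):

UIImage *downloadedImage = [self imageFromResponse:response data:data scale:[[UIScreen mainScreen] scale]];
if (downloadedImage) {
    self.imageView.image = downloadedImage;
}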
Answered 2015-01-08T11:14:33.000