
I have this function in the .m file of my ImageView class:

- (UIColor*) getPixelColorAtLocation:(CGPoint)point {

How do I call this function from my view controller? I tried:

- (void)viewDidLoad
{
    CGPoint point = CGPointMake(100, 100);
    UIColor* color = [ImageView getPixelColorAtLocation:point];
}

But that gives me a warning saying: "No known class method for selector 'getPixelColorAtLocation:'" (and I did import the ImageView class file into the view controller).

My .h file for ImageView:

#import <Foundation/Foundation.h>

@interface ImageView : UIImageView {
    UIColor* lastColor;
    //id pickedColorDelegate;
}

@property (nonatomic, retain) UIColor* lastColor;
//@property (nonatomic, retain) id pickedColorDelegate;


- (UIColor*) getPixelColorAtLocation:(CGPoint)point;
- (CGContextRef) createARGBBitmapContextFromImage:(CGImageRef)inImage;
@end

My .m file for ImageView:

    #import "ImageView.h"
#import "ViewController.h"
#import <CoreGraphics/CoreGraphics.h>
#import <QuartzCore/CoreAnimation.h>
@implementation ImageView

@synthesize lastColor;
//@synthesize pickedColorDelegate;
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        // Initialization code
    }
    return self;
}

- (void) touchesEnded:(NSSet*)touches withEvent:(UIEvent*)event {
    if (self.hidden==YES) {
        //color wheel is hidden, so don't handle  this as a color wheel event.
        [[self nextResponder] touchesEnded:touches withEvent:event];
        return;
    }

    UITouch* touch = [touches anyObject];
   // ColorPickerAppDelegate *mainDelegate = (ColorPickerAppDelegate *)[[UIApplication sharedApplication] delegate];
    CGPoint point = [touch locationInView:self]; //where image was tapped

    self.lastColor = [self getPixelColorAtLocation:point];
    NSLog(@"color %@",lastColor);
    //[pickedColorDelegate pickedColor:(UIColor*)self.lastColor];
}


- (UIColor*) getPixelColorAtLocation:(CGPoint)point {
    UIColor* color = nil;
    CGImageRef inImage = self.image.CGImage;
    // Create off screen bitmap context to draw the image into. Format ARGB is 4 bytes for each pixel: Alpha, Red, Green, Blue
    CGContextRef cgctx = [self createARGBBitmapContextFromImage:inImage];
    if (cgctx == NULL) { return nil; /* error */ }

    size_t w = CGImageGetWidth(inImage);
    size_t h = CGImageGetHeight(inImage);
    CGRect rect = {{0,0},{w,h}};

    // Draw the image to the bitmap context. Once we draw, the memory
    // allocated for the context for rendering will then contain the
    // raw image data in the specified color space.
    CGContextDrawImage(cgctx, rect, inImage);

    // Now we can get a pointer to the image data associated with the bitmap
    // context.
    unsigned char* data = CGBitmapContextGetData (cgctx);
    if (data != NULL) {
        //offset locates the pixel in the data from x,y.
        //4 for 4 bytes of data per pixel, w is width of one row of data.
        int offset = 4*((w*round(point.y))+round(point.x));
        int alpha =  data[offset];
        int red = data[offset+1];
        int green = data[offset+2];
        int blue = data[offset+3];
        NSLog(@"offset: %i colors: RGB A %i %i %i  %i",offset,red,green,blue,alpha);
        color = [UIColor colorWithRed:(red/255.0f) green:(green/255.0f) blue:(blue/255.0f) alpha:(alpha/255.0f)];
    }

    // When finished, release the context
    CGContextRelease(cgctx);
    // Free image data memory for the context
    if (data) { free(data); }

    return color;
}



- (CGContextRef) createARGBBitmapContextFromImage:(CGImageRef) inImage {

    CGContextRef    context = NULL;
    CGColorSpaceRef colorSpace;
    void *          bitmapData;
    int             bitmapByteCount;
    int             bitmapBytesPerRow;

    // Get image width, height. We'll use the entire image.
    size_t pixelsWide = CGImageGetWidth(inImage);
    size_t pixelsHigh = CGImageGetHeight(inImage);

    // Declare the number of bytes per row. Each pixel in the bitmap in this
    // example is represented by 4 bytes; 8 bits each of red, green, blue, and
    // alpha.
    bitmapBytesPerRow   = (pixelsWide * 4);
    bitmapByteCount     = (bitmapBytesPerRow * pixelsHigh);

    // Use the device RGB color space.
    colorSpace = CGColorSpaceCreateDeviceRGB();

    if (colorSpace == NULL)
    {
        fprintf(stderr, "Error allocating color space\n");
        return NULL;
    }

    // Allocate memory for image data. This is the destination in memory
    // where any drawing to the bitmap context will be rendered.
    bitmapData = malloc( bitmapByteCount );
    if (bitmapData == NULL)
    {
        fprintf (stderr, "Memory not allocated!");
        CGColorSpaceRelease( colorSpace );
        return NULL;
    }

    // Create the bitmap context. We want pre-multiplied ARGB, 8-bits
    // per component. Regardless of what the source image format is
    // (CMYK, Grayscale, and so on) it will be converted over to the format
    // specified here by CGBitmapContextCreate.
    context = CGBitmapContextCreate (bitmapData,
                                     pixelsWide,
                                     pixelsHigh,
                                     8,      // bits per component
                                     bitmapBytesPerRow,
                                     colorSpace,
                                     kCGImageAlphaPremultipliedFirst);
    if (context == NULL)
    {
        free (bitmapData);
        fprintf (stderr, "Context not created!");
    }

    // Make sure to release the colorspace before returning
    CGColorSpaceRelease( colorSpace );

    return context;
}

3 Answers


Define your method in the .h file of your ImageView class.
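
(The header posted in the question already contains this declaration; the line the answer refers to looks like this, shown here only for illustration:)

    // ImageView.h -- declaring the method here makes it visible to other
    // classes, such as the view controller, that import this header.
    - (UIColor*) getPixelColorAtLocation:(CGPoint)point;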

Answered 2013-06-17T06:27:02.030

I got it! Well, it turns out that when I created the ImageView class it wasn't added to my target, which is why it wasn't receiving anything or letting me call the class's methods. Apple strikes again, making your life miserable! Anyway, to connect the class file to the target I followed these steps:

  • Using Xcode 4, in the Project Navigator, select the .m file of the class it is complaining about (ImageView in my case)
  • Go to View -> Utilities -> Show File Inspector (this shows the File Inspector on the right, with the .m file's information)
  • Open the Target Membership section and make sure your target is checked for this .m file
Answered 2013-06-17T07:25:35.663

You have to call it on an instance of ImageView (created with alloc] init]) if you keep it as

- (UIColor*) getPixelColorAtLocation:(CGPoint)point 

or make it static (a class method):

+ (UIColor*) getPixelColorAtLocation:(CGPoint)point
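
For illustration, here is a minimal sketch of the instance-based call from the view controller (assuming ARC; the imageView property and the image name are assumptions, not from the original question):

#import "ImageView.h"

@interface ViewController ()
// Hypothetical property holding the ImageView instance (could also be an IBOutlet).
@property (nonatomic, retain) ImageView *imageView;
@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Create the instance in code (skip this if it already comes from a nib/storyboard).
    self.imageView = [[ImageView alloc] initWithFrame:self.view.bounds];
    self.imageView.image = [UIImage imageNamed:@"example.png"]; // assumed image name
    [self.view addSubview:self.imageView];

    // Send the message to the instance, not to the class.
    CGPoint point = CGPointMake(100, 100);
    UIColor *color = [self.imageView getPixelColorAtLocation:point];
    NSLog(@"picked color: %@", color);
}

@end

Note that, as written, getPixelColorAtLocation: reads self.image, so it cannot simply be turned into a class method; calling it on an instance is the practical option here.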
Answered 2013-06-17T06:22:58.967