I have a color image, and I want to get the pixel value at the point where the image is touched. So I tried the code below:
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
    float x, y;
    for (UITouch *touch in touches) {
        x = [touch locationInView:imageView].x;
        y = [touch locationInView:imageView].y;
        if ((x >= 0) && (x <= imageView.frame.size.width) && (y >= 0) && (y <= imageView.frame.size.height)) {
            [self getRGBAFromImage:imageView.image atX:x andY:y];
        }
    }
}
- (void)getRGBAFromImage:(UIImage *)image atX:(int)xx andY:(int)yy {
    CGImageRef imageRef = [image CGImage];
    NSUInteger width = CGImageGetWidth(imageRef);
    NSUInteger height = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = malloc(height * width * 4);
    NSUInteger bytesPerPixel = 4;
    NSUInteger bytesPerRow = bytesPerPixel * width;
    NSUInteger bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(rawData,
                                                 width,
                                                 height,
                                                 bitsPerComponent,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef);
    CGContextRelease(context);

    int byteIndex = (bytesPerRow * yy) + xx * bytesPerPixel;
    CGFloat red   = (rawData[byteIndex] * 1.0);
    CGFloat green = (rawData[byteIndex + 1] * 1.0);
    CGFloat blue  = (rawData[byteIndex + 2] * 1.0);
    CGFloat alpha = (rawData[byteIndex + 3] * 1.0) / 255.0;

    self.textView.textColor = [UIColor colorWithRed:red/255.0 green:green/255.0 blue:blue/255.0 alpha:alpha];
    free(rawData);
}
This returns a pixel value, but not the correct one. For example, the touched color in the image view is red, but the text in my textView does not turn red. Please help me figure out what is wrong with my code.
Thanks!
Here is the category on UIImage that I use when I need to get the colour of a specific pixel in an image:
@implementation UIImage (ColorImage)

/*
 Returns the color of the image pixel at point. Returns nil if point lies outside of the image bounds.
 If the point coordinates contain a decimal part, it is truncated (trunc).
 This method retrieves the pixel data of the image by drawing the specific pixel onto a 1x1 bitmap.
 To query pixel colors of the same image many times, it is best to cache the drawn image for better performance.
 */
- (UIColor *)colorAtPixel:(CGPoint)point {
    // Cancel if the point is outside of the image coordinates
    if (!CGRectContainsPoint(CGRectMake(0.0f, 0.0f, self.size.width, self.size.height), point)) {
        return nil;
    }

    // Create a 1x1 pixel byte array and bitmap context to draw the pixel onto.
    // Reference: http://stackoverflow.com/questions/1042830/retrieving-a-pixel-alpha-value-for-a-uiimage
    NSInteger pointX = trunc(point.x);
    NSInteger pointY = trunc(point.y);
    CGImageRef cgImage = self.CGImage;
    NSUInteger width = CGImageGetWidth(cgImage);
    NSUInteger height = CGImageGetHeight(cgImage);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    int bytesPerPixel = 4;
    int bytesPerRow = bytesPerPixel * 1;
    NSUInteger bitsPerComponent = 8;
    unsigned char pixelData[4] = { 0, 0, 0, 0 };
    CGContextRef context = CGBitmapContextCreate(pixelData,
                                                 1,
                                                 1,
                                                 bitsPerComponent,
                                                 bytesPerRow,
                                                 colorSpace,
                                                 kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGColorSpaceRelease(colorSpace);
    CGContextSetBlendMode(context, kCGBlendModeCopy);

    // Draw the pixel we are interested in onto the 1x1 context.
    // The y offset accounts for Core Graphics' flipped (bottom-left origin) coordinate system.
    CGContextTranslateCTM(context, -pointX, pointY - (CGFloat)height);
    CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, (CGFloat)width, (CGFloat)height), cgImage);
    CGContextRelease(context);

    // Convert the color values [0...255] to floats [0.0f...1.0f]
    CGFloat r = (CGFloat)pixelData[0] / 255.0f;
    CGFloat g = (CGFloat)pixelData[1] / 255.0f;
    CGFloat b = (CGFloat)pixelData[2] / 255.0f;
    CGFloat a = (CGFloat)pixelData[3] / 255.0f;
    return [UIColor colorWithRed:r green:g blue:b alpha:a];
}

@end
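If you keep the category in its own files, the matching header only needs to declare the one method. Something like this (the file name UIImage+ColorImage.h is just a suggestion, not part of the original code):

// UIImage+ColorImage.h (suggested file name)
#import <UIKit/UIKit.h>

@interface UIImage (ColorImage)
// Returns the colour of the pixel at point, or nil if point lies outside the image bounds.
- (UIColor *)colorAtPixel:(CGPoint)point;
@end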
You can call -colorAtPixel: directly on a UIImage and it will return the colour as a UIColor.
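For example, from your touch handler it could look something like the sketch below, reusing your existing imageView and textView. Note that it assumes the image view shows the image at its natural size, so touch coordinates in the view line up with image coordinates; if the view scales the image (e.g. an aspect-fit contentMode), you would need to convert the point to image coordinates first.

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event {
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:imageView];
    // colorAtPixel: returns nil when the point is outside the image bounds
    UIColor *color = [imageView.image colorAtPixel:point];
    if (color) {
        self.textView.textColor = color;
    }
}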