UIImage is a fairly thin wrapper for a CGImage (so most of this applies to standard Mac dev too), but CGImage is an opaque type. You can't get at the raw image data (actually you can, but only in its encoded form).
If you want to test individual pixel values, you must render the image into a bitmap context that you've backed with a memory buffer you control.
This is not as complex as it sounds, but there are still quite a few steps involved - and hunting around for the information can lead you down a few blind alleys, or into unnecessary work.
Erica Sadun, in her iPhone Developer's Cookbook, has a recipe for this, but I'd already come up with some code of my own beforehand. I believe mine is more concise, and doesn't suffer from the sub-pixel problem that an earlier version of mine had (and that I believe Sadun's is susceptible to too). So while I recommend the book, I'm presenting my code here too.
As written it is only interested in alpha values, but some small tweaks will let you get at the colour information too (there's a sketch of one such variation at the end of this post).
//
// AlphaPixels.h
//
// Created by Phil Nash on 26/10/2008.
// Copyright 2008 Two Blue Cubes Software
//
// Distributed under the Boost Software License, Version 1.0.
// (see: http://www.boost.org/LICENSE_1_0.txt)
#import <UIKit/UIKit.h>
@interface AlphaPixels : NSObject
{
    unsigned char* pixelData;
    int width;
}
-(id) initWithImage: (UIImage*) image;
-(float) alphaAtX:(int) x y:(int) y;
@end
//
// AlphaPixels.m
//
// Created by Phil Nash on 26/10/2008.
// Copyright 2008 Two Blue Cubes Software
//
// Distributed under the Boost Software License, Version 1.0.
// (see: http://www.boost.org/LICENSE_1_0.txt)
//
#import "AlphaPixels.h"
@implementation AlphaPixels
-(id) initWithImage: (UIImage*) image
{
    if( !( self = [super init] ) )
        return nil;

    // We'll be needing a chunk of memory to hold the rendered (alpha channel of the) image...
    width = image.size.width;
    int height = image.size.height;
    pixelData = malloc( width * height );
    if( !pixelData )
    {
        NSException *exception = [NSException exceptionWithName:@"AlphaPixelsException"
                                                         reason:@"Unable to allocate memory for pixel data"
                                                       userInfo:nil];
        @throw exception;
    }

    // ... and a bitmap context to describe how to render - the alpha only constant
    // is the important bit here (no need to provide a colorspace)
    CGContextRef context = CGBitmapContextCreate( pixelData,
                                                  width,
                                                  height,
                                                  8,       // bits per component
                                                  width,   // bytes per row - one byte per pixel
                                                  NULL,
                                                  kCGImageAlphaOnly );
    if( !context )
    {
        free( pixelData ); // don't leak the buffer we just allocated
        NSException *exception = [NSException exceptionWithName:@"AlphaPixelsException"
                                                         reason:@"Unable to create bitmap context"
                                                       userInfo:nil];
        @throw exception;
    }

    // Render the image into the context (ending up in our buffer)
    CGContextDrawImage( context, CGRectMake(0, 0, width, height), image.CGImage );
    CGContextRelease( context );

    return self;
}
-(void) dealloc
{
    free( pixelData );
    [super dealloc];
}
-(float) alphaAtX:(int) x y:(int) y
{
    // Simple calculation to get the offset into the buffer for the coordinate values
    // - but note these are all integer values. Floats representing sub-pixel values would
    // need to be rounded before doing something similar
    return pixelData[y * width + x] / 255.0f;
}
@end
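Usage is then straightforward - something along these lines (the image name, coordinates and threshold here are just placeholders for illustration):

UIImage* image = [UIImage imageNamed:@"sprite.png"];   // placeholder image name
AlphaPixels* alphaPixels = [[AlphaPixels alloc] initWithImage: image];

// e.g. treat a pixel as a "hit" if it's more than half opaque
BOOL isHit = [alphaPixels alphaAtX: 10 y: 20] > 0.5f;

[alphaPixels release];   // manual retain/release, as in the class above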
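And as for those "small tweaks" to get at the colour information: the fragment below is just a sketch of one way to go about it (it's not part of the class as presented) - render into a four-byte-per-pixel RGBA context instead of the alpha-only one:

CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
pixelData = malloc( width * height * 4 );   // four bytes per pixel now
CGContextRef context = CGBitmapContextCreate( pixelData,
                                              width,
                                              height,
                                              8,            // bits per component
                                              width * 4,    // bytes per row
                                              colourSpace,
                                              kCGImageAlphaPremultipliedLast );
CGColorSpaceRelease( colourSpace );

// After CGContextDrawImage, each pixel occupies four consecutive bytes - R, G, B, A
// (with the colour components premultiplied by the alpha) - starting at (y * width + x) * 4.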