I've got this category method on NSColor:

+ (NSColor*) patternColorWithData:(unsigned char*)bits oneColor:(NSColor*)oneColor zeroColor:(NSColor*)zeroColor
{
    NSBitmapImageRep* patternBitmap;
    patternBitmap = [[[NSBitmapImageRep alloc]
        initWithBitmapDataPlanes:NULL
                      pixelsWide:8
                      pixelsHigh:8
                   bitsPerSample:8
                 samplesPerPixel:3
                        hasAlpha:NO
                        isPlanar:NO
                  colorSpaceName:NSDeviceRGBColorSpace
                     bytesPerRow:0
                    bitsPerPixel:0] autorelease];

    NSColor* patternColor = nil;
    long x, y;
    NSColor* theColor;

    // Bit x of bits[y] selects the color for pixel (x, y).
    for ( y = 0; y < 8; y++ )
    {
        for ( x = 0; x < 8; x++ )
        {
            NSUInteger bit = ( bits[y] & ( 1 << x ) );
            theColor = ( bit ) ? oneColor : zeroColor;
            [patternBitmap setColor:theColor atX:x y:y];
        }
    }

    NSImage* patternImage = [[[NSImage alloc] initWithSize:[patternBitmap size]] autorelease];
    [patternImage addRepresentation:patternBitmap];
    patternColor = [NSColor colorWithPatternImage:patternImage];
    return patternColor;
}

I invoke it like this:

unsigned char bits[8] = { 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55 };
duelPatternColor = [[NSColor patternColorWithData:bits
                                         oneColor:[NSColor blackColor]
                                        zeroColor:[NSColor whiteColor]] retain];

With 0x55 in every row I'd expect one-pixel-wide vertical stripes, but the resulting patternColor is always solid black, and if I examine the bitmapData of patternBitmap, it remains just a bunch of zeros. If I modify the code so that theColor is always [NSColor greenColor], I get a solid green color, so I know the call to setColor:atX:y: is doing something. So, why can I not use [NSColor whiteColor]? I'm sure I'm missing something simple, but what that is, I do not know...
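The only theory I have is a color space mismatch: [NSColor greenColor] is in NSCalibratedRGBColorSpace, while blackColor and whiteColor are in NSCalibratedWhiteColorSpace, and the bitmap rep was created with NSDeviceRGBColorSpace. If setColor:atX:y: doesn't convert grayscale colors itself, would converting in the inner loop be the right fix? An untested sketch of what I mean:

    theColor = ( bit ) ? oneColor : zeroColor;
    // Guess: convert to the rep's color space before writing the pixel.
    theColor = [theColor colorUsingColorSpaceName:NSDeviceRGBColorSpace];
    [patternBitmap setColor:theColor atX:x y:y];

Is that the piece I'm missing, or is something else going on?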