//
//  KPixel.m
//  ToneMapper
//
//  Created by Andrey on 26/04/2009.
//  Copyright 2009 Karma Software. All rights reserved.
//

#import "KPixel.h"

/* Companding / Lab helper macros.  They expand against local constants
   declared at each call site (a, k0, phi, gamma, t0) and modify their
   argument in place.  Each is wrapped in do { } while (0) so it acts as
   a single statement even inside an unbraced if/else. */

/* sRGB -> linear: inverse sRGB companding (IEC 61966-2-1). */
#define ApplyGammaCorrection(c) do { if (c <= k0) c /= phi; else c = pow((c + a) / (1.0 + a), gamma); } while (0)

/* XYZ -> Lab forward function f(t): cube root above t0 = (6/29)^3,
   linear segment t / (3 * (6/29)^2) + 4/29 at or below it. */
#define XYZToLab(c) do { if (c <= t0) c = (1.0 / 3) * (29.0 / 6) * (29.0 / 6) * c + (4.0 / 29); else c = pow(c, 1.0 / 3); } while (0)

/* Lab -> XYZ inverse f^-1(t): cube above t = 6/29, else 3*(6/29)^2*(t - 4/29).
   NOTE(review): the original compared t against t0 = (6/29)^3 (wrong domain;
   the threshold for the f-value is 6/29, i.e. c^3 <= t0) and scaled the
   linear branch by 3 * t0 * t0 = 3 * (6/29)^6 instead of 3 * (6/29)^2. */
#define LabToXYZ(c) do { if (c * c * c <= t0) c = (c - 16.0 / 116) * 3 * (6.0 / 29) * (6.0 / 29); else c = c * c * c; } while (0)

/* linear -> sRGB: forward sRGB companding.
   NOTE(review): the original linear branch read `c * phi;` — an expression
   with no effect; the intended operation is `c *= phi`. */
#define ApplyInverseGammaCorrection(c) do { if (c <= k0) c *= phi; else c = (1 + a) * pow(c, 1 / gamma) - a; } while (0)

/* Converts an RGB pixel into its HSB (hue/saturation/brightness)
   representation.  Components are expected in [0, 1]; the resulting hue
   is in [0, 1] as well.  Achromatic (grey) input yields hue = saturation = 0. */
HSBPixelRep RGBToHSB(RGBPixelRep rgbPixel)
{
	HSBPixelRep hsb;
	
	CGFloat maxComponent = MAX(rgbPixel.red, MAX(rgbPixel.green, rgbPixel.blue));
	CGFloat minComponent = MIN(rgbPixel.red, MIN(rgbPixel.green, rgbPixel.blue));
	CGFloat chroma = maxComponent - minComponent;
	
	/* Brightness is simply the largest component */
	hsb.brightness = maxComponent;
	
	if (chroma == 0.0)
	{
		/* Grey: hue is undefined, report zero for both hue and saturation */
		hsb.hue = 0.0;
		hsb.saturation = 0.0;
		return hsb;
	}
	
	hsb.saturation = chroma / maxComponent;
	
	/* Per-channel distance terms used to place the hue within its sector */
	CGFloat deltaRed   = (((maxComponent - rgbPixel.red)   / 6) + (chroma / 2)) / chroma;
	CGFloat deltaGreen = (((maxComponent - rgbPixel.green) / 6) + (chroma / 2)) / chroma;
	CGFloat deltaBlue  = (((maxComponent - rgbPixel.blue)  / 6) + (chroma / 2)) / chroma;
	
	if (maxComponent == rgbPixel.red)
		hsb.hue = deltaBlue - deltaGreen;
	else if (maxComponent == rgbPixel.green)
		hsb.hue = (1.0 / 3) + deltaRed - deltaBlue;
	else if (maxComponent == rgbPixel.blue)
		hsb.hue = (2.0 / 3) + deltaGreen - deltaRed;
	
	/* Wrap hue back into [0, 1] */
	if (hsb.hue < 0.0) hsb.hue += 1.0;
	if (hsb.hue > 1.0) hsb.hue -= 1.0;
	
	return hsb;
}

/* Converts an sRGB pixel into this project's normalized CIELab
   representation: lightness is scaled into [0, 1], and the unscaled
   a/b differences are remapped from [-0.256, 0.256] and [-0.64, 0.64]
   into [0, 1] (experimental normalization, mirrored by CIELabToRGB). */
CIELabPixelRep RGBToCIELab(RGBPixelRep rgbPixel)
{
	CIELabPixelRep labPixel;
	
	/* sRGB companding constants (source color space is assumed to be sRGB);
	   these names are referenced by the ApplyGammaCorrection macro */
	const CGFloat a = 0.055;
	const CGFloat k0 = 0.04045;
	const CGFloat phi = 12.92;
	const CGFloat gamma = 2.4;
	
	CGFloat red = rgbPixel.red;
	CGFloat green = rgbPixel.green;
	CGFloat blue = rgbPixel.blue;
	
	/* RGB is a relative color space, so first linearize the components
	   and pass through an absolute space (CIEXYZ) */
	ApplyGammaCorrection(red);
	ApplyGammaCorrection(green);
	ApplyGammaCorrection(blue);
	
	/* sRGB -> XYZ matrix for the CIE standard 2° observer and a D65
	   white point (noon daylight) */
	CGFloat x = red * 0.4124 + green * 0.3576 + blue * 0.1805;
	CGFloat y = red * 0.2126 + green * 0.7152 + blue * 0.0722;
	CGFloat z = red * 0.0193 + green * 0.1192 + blue * 0.9505;
	
	/* Normalize the tristimulus values by the D65 reference white */
	const CGFloat xRef = 0.95047;
	const CGFloat yRef = 1.0;
	const CGFloat zRef = 1.08883;
	
	x /= xRef;
	y /= yRef;
	z /= zRef;
	
	/* XYZ<->La*b* conversion constant (referenced by the XYZToLab macro) */
	const CGFloat t0 = pow(6.0 / 29, 3);
	XYZToLab(x);
	XYZToLab(y);
	XYZToLab(z);
	
	/* Standard Lab would be L = 116y - 16, a* = 500(x - y), b* = 200(y - z);
	   here lightness is rescaled to [0, 1] and a/b are left unscaled */
	labPixel.lightness = (116 * y - 16) / 100;
	labPixel.a = x - y;	// 128 / 500 = 0.256 [-0.256 0.256] [-0.86, 0.98]
	labPixel.b = y - z;	// 128 / 200 = 0.64 [-0.64 0.64] [-1.07, 0.94]
	
	/* Experimental normalization into [0, 1] */
	const CGFloat maxA = 0.256; const CGFloat minA = -0.256;
	const CGFloat maxB = 0.64; const CGFloat minB = -0.64;
	
	labPixel.a = (labPixel.a - minA) / (maxA - minA);
	labPixel.b = (labPixel.b - minB) / (maxB - minB);
	
	return labPixel;
}

/* Converts an HSB pixel back to RGB.  Hue, saturation and brightness are
   expected in [0, 1]; the resulting channels are in [0, 1] as well. */
RGBPixelRep HSBToRGB(HSBPixelRep hsbPixel)
{
	RGBPixelRep rgb;
	
	if (hsbPixel.saturation == 0.0)
	{
		/* Achromatic: every channel equals the brightness */
		rgb.red = rgb.green = rgb.blue = hsbPixel.brightness;
		return rgb;
	}
	
	CGFloat hue = hsbPixel.hue;
	CGFloat saturation = hsbPixel.saturation;
	CGFloat brightness = hsbPixel.brightness;
	
	/* Scale hue onto the six sectors of the color wheel; exactly 1.0
	   wraps around to sector 0 */
	hue *= 6.0;
	if (hue == 6) hue = 0.0;
	
	NSInteger sector = (NSInteger)hue;
	
	/* Intermediate channel values for this sector */
	CGFloat p = brightness * (1 - saturation);
	CGFloat q = brightness * (1 - saturation * (hue - sector));
	CGFloat t = brightness * (1 - saturation * (1 - (hue - sector)));
	
	switch (sector)
	{
		case 0:
			rgb.red		= brightness;
			rgb.green	= t;
			rgb.blue	= p;
			break;
		case 1:
			rgb.red		= q;
			rgb.green	= brightness;
			rgb.blue	= p;
			break;
		case 2:
			rgb.red		= p;
			rgb.green	= brightness;
			rgb.blue	= t;
			break;
		case 3:
			rgb.red		= p;
			rgb.green	= q;
			rgb.blue	= brightness;
			break;
		case 4:
			rgb.red		= t;
			rgb.green	= p;
			rgb.blue	= brightness;
			break;
		default:
			rgb.red		= brightness;
			rgb.green	= p;
			rgb.blue	= q;
			break;
	}
	
	return rgb;
}

/* Converts this project's normalized CIELab pixel back to sRGB.
   Inverse of RGBToCIELab: a and b are first denormalized from [0, 1]
   back into [-0.256, 0.256] and [-0.64, 0.64]. */
RGBPixelRep CIELabToRGB(CIELabPixelRep cieLabPixel)
{
	RGBPixelRep rgb;
	
	/* Experimental denormalization (mirrors RGBToCIELab) */
	const CGFloat maxA = 0.256; const CGFloat minA = -0.256;
	const CGFloat maxB = 0.64; const CGFloat minB = -0.64;
	
	cieLabPixel.a = cieLabPixel.a * (maxA - minA) + minA;
	cieLabPixel.b = cieLabPixel.b * (maxB - minB) + minB;
	
	/* Recover the Lab f-values; lightness scaling is undone here, while
	   a and b stay unscaled, exactly as RGBToCIELab produced them */
	CGFloat y = (cieLabPixel.lightness * 100 + 16) / 116;
	CGFloat x = y + cieLabPixel.a;
	CGFloat z = y - cieLabPixel.b;
	
	/* XYZ<->La*b* conversion constant (referenced by the LabToXYZ macro) */
	const CGFloat t0 = pow(6.0 / 29, 3);
	LabToXYZ(x);
	LabToXYZ(y);
	LabToXYZ(z);
	
	/* Scale back up by the D65 reference white */
	const CGFloat xRef = 0.95047;
	const CGFloat yRef = 1.0;
	const CGFloat zRef = 1.08883;
	
	x *= xRef;
	y *= yRef;
	z *= zRef;
	
	/* XYZ -> linear sRGB matrix */
	CGFloat r = x *  3.2406 + y * -1.5372 + z * -0.4986;
	CGFloat g = x * -0.9689 + y *  1.8758 + z *  0.0415;
	CGFloat b = x *  0.0557 + y * -0.2040 + z *  1.0570;
	
	/* sRGB companding constants; k0 here is the linear-domain threshold
	   (0.0031308) rather than the companded one used in RGBToCIELab */
	const CGFloat a = 0.055;
	const CGFloat k0 = 0.0031308;
	const CGFloat phi = 12.92;
	const CGFloat gamma = 2.4;
	
	ApplyInverseGammaCorrection(r);
	ApplyInverseGammaCorrection(g);
	ApplyInverseGammaCorrection(b);
	
	rgb.red = r;
	rgb.green = g;
	rgb.blue = b;
	
	return rgb;
}

@implementation KPixel

/* Plain initializer: all three representations stay zeroed.
   (+alloc already zero-fills instance memory, so the bzero calls the
   original performed in every initializer were redundant.) */
- (id) init
{
	if (self = [super init])
	{
		/* _rgbRep, _hsbRep and _cieLabRep are already all-zero */
	}
	
	return self;
}

/* Initializes from RGB components (each expected in [0, 1]) and derives
   the HSB and CIELab representations from them. */
- (id) initWithRed: (CGFloat)red green:(CGFloat)green andBlue: (CGFloat)blue
{
	if (self = [super init])
	{
		_rgbRep.red		= red;
		_rgbRep.green	= green;
		_rgbRep.blue	= blue;
		
		_hsbRep = RGBToHSB(_rgbRep);
		_cieLabRep = RGBToCIELab(_rgbRep);
	}
	
	return self;
}

/* Initializes from HSB components (each expected in [0, 1]) and derives
   the RGB and CIELab representations from them. */
- (id) initWithHue: (CGFloat)hue saturation: (CGFloat) saturation andBrightness: (CGFloat)brightness
{
	if (self = [super init])
	{
		_hsbRep.hue = hue;
		_hsbRep.saturation = saturation;
		_hsbRep.brightness = brightness;
		
		_rgbRep = HSBToRGB(_hsbRep);
		_cieLabRep = RGBToCIELab(_rgbRep);
	}
	
	return self;
}

/* Initializes from normalized CIELab components (see RGBToCIELab for the
   normalization) and derives the RGB and HSB representations from them. */
- (id) initWithLightness: (CGFloat)lightness a: (CGFloat)a andB: (CGFloat)b
{
	if (self = [super init])
	{
		_cieLabRep.lightness = lightness;
		_cieLabRep.a = a;
		_cieLabRep.b = b;
		
		_rgbRep = CIELabToRGB(_cieLabRep);
		_hsbRep = RGBToHSB(_rgbRep);
	}
	
	return self;
}

/* Initializes from an NSColor's RGB components.
   NOTE(review): -redComponent & co. require an RGB-compatible color space;
   presumably callers always pass calibrated/device RGB colors — verify. */
- (id) initWithNSColor: (NSColor*)color
{
	if (self = [super init])
	{
		_rgbRep.red		= [color redComponent];
		_rgbRep.green	= [color greenComponent];
		_rgbRep.blue	= [color blueComponent];
		
		_hsbRep = RGBToHSB(_rgbRep);
		_cieLabRep = RGBToCIELab(_rgbRep);
	}
	
	return self;
}

/* Returns an autoreleased NSColor holding the receiver's RGB components.
   NOTE(review): the original retained the convenience-constructor result,
   which leaks unless every caller released it; a method named `nsColor`
   must return an object the caller does not own per Cocoa ownership rules. */
- (NSColor*) nsColor
{
	return [NSColor colorWithCalibratedRed:_rgbRep.red 
									 green:_rgbRep.green 
									  blue:_rgbRep.blue
									 alpha:1.0];
}

/* RGB accessors */
- (CGFloat) red		{ return _rgbRep.red; }
- (CGFloat) green	{ return _rgbRep.green; }
- (CGFloat) blue	{ return _rgbRep.blue; }

/* HSB accessors */
- (CGFloat) hue		{ return _hsbRep.hue; }
- (CGFloat) saturation { return _hsbRep.saturation; }
- (CGFloat) brightness { return _hsbRep.brightness; }

/* CIELab accessors (normalized, see RGBToCIELab) */
- (CGFloat) lightness { return _cieLabRep.lightness; }
- (CGFloat) a { return _cieLabRep.a; }
- (CGFloat) b { return _cieLabRep.b; }

/* Packs all nine channel values into a single vector, indexed by the
   KImageChannel constants. */
- (KRedundantColorVector) redundantColorVector
{
	KRedundantColorVector colorVector;
	
	colorVector.data[RedImageChannel]			= [self red];
	colorVector.data[GreenImageChannel]			= [self green];
	colorVector.data[BlueImageChannel]			= [self blue];
	colorVector.data[HueImageChannel]			= [self hue];
	colorVector.data[SaturationImageChannel]	= [self saturation];
	colorVector.data[BrightnessImageChannel]	= [self brightness];
	colorVector.data[LightnessImageChannel]		= [self lightness];
	colorVector.data[AImageChannel]				= [self a];
	colorVector.data[BImageChannel]				= [self b];	
	
	return colorVector;
}

/* Returns a new autoreleased pixel blending the receiver with anotherPixel.
   NormalBlendMode linearly interpolates each RGB channel by `fraction`
   (0 = receiver, 1 = anotherPixel); BrightnessBlendMode keeps the
   receiver's brightness but takes hue and saturation from anotherPixel
   (`fraction` is ignored).  Returns nil for an unknown blend mode.
   NOTE(review): the original sent init... to an alloc'ed object without
   using the returned value (init may substitute a different object),
   returned an *uninitialized* object for unknown modes, and returned a
   +1 object from a method whose name does not imply ownership transfer. */
- (KPixel*) blendedPixelWithFraction:(CGFloat)fraction 
							 ofPixel:(KPixel*)anotherPixel
					  usingBlendMode:(enum KPixelBlendMode)blendMode
{
	KPixel* newPixel = nil;
	
	switch (blendMode) 
	{
		case NormalBlendMode:
		{
			CGFloat red		= (1.0 - fraction) * [self red] + fraction * [anotherPixel red];
			CGFloat green	= (1.0 - fraction) * [self green] + fraction * [anotherPixel green];
			CGFloat blue	= (1.0 - fraction) * [self blue] + fraction * [anotherPixel blue];
			
			newPixel = [[[KPixel alloc] initWithRed:red green:green andBlue:blue] autorelease];
			break;
		}
		case BrightnessBlendMode:
			newPixel = [[[KPixel alloc] initWithHue:[anotherPixel hue]
										 saturation:[anotherPixel saturation]
									  andBrightness:[self brightness]] autorelease];
			break;
	
		default:
			break;
	}
	
	return newPixel;
}
@end
