
/****************************************************************************/
/*Copyright (c) 2011, Florent DEVILLE.                                      */
/*All rights reserved.                                                      */
/*                                                                          */
/*Redistribution and use in source and binary forms, with or without        */
/*modification, are permitted provided that the following conditions        */
/*are met:                                                                  */
/*                                                                          */
/* - Redistributions of source code must retain the above copyright         */
/*notice, this list of conditions and the following disclaimer.             */
/* - Redistributions in binary form must reproduce the above                */
/*copyright notice, this list of conditions and the following               */
/*disclaimer in the documentation and/or other materials provided           */
/*with the distribution.                                                    */
/* - The names of its contributors cannot be used to endorse or promote     */
/*products derived from this software without specific prior written        */
/*permission.                                                               */
/* - The source code cannot be used for commercial purposes without         */ 
/*its contributors' permission.                                             */
/*                                                                          */
/*THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS       */
/*"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT         */
/*LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS         */
/*FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE            */
/*COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,       */
/*INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,      */
/*BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;          */
/*LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER          */
/*CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT        */
/*LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN         */
/*ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE           */
/*POSSIBILITY OF SUCH DAMAGE.                                               */
/****************************************************************************/

#include "RTRaytracer.h"


#include "RTScene.h"

#include "RTIPrimitive.h"

#include <math.h>
#include <iostream>
#include "RTMaterial.h"

//use to get access to lights function to calculate illumination.
#include "RTILight.h"

namespace RT
{
	//Initialise the singleton instance pointer (lazily allocated by getInstance(),
	//released by close()).
	RTRaytracer* RTRaytracer::m_instance = 0;

	//Generate the primary ray going through pixel (x, y).
	//The ray's origin is left untouched; only its direction is set.
	void RTRaytracer::generateRay(I32 x, I32 y, RTRay& ray)const
	{
		//un-normalized direction from the eye through the pixel:
		//pixel coordinates are centered, scaled to world units, with the
		//precomputed focal distance as depth
		RTVector3f dir(
			m_precompWidth * (x - m_pixelWidth/2.f),
			m_precompHeight * (y - m_pixelHeight/2.f),
			m_fov);

		//a left-handed coordinate system looks down the negative z axis
		if(_handedness == LEFT_HANDED)
			dir.z = -dir.z;

		//store the unit-length direction
		dir.normalize();
		ray.setDirection(dir);
	}

	//Generate the m_overSamplingLevel^2 sub-pixel rays for pixel (x, y).
	//ray must point to an array of at least that many RTRay objects.
	void RTRaytracer::generateOverSamplingRay(I32 x, I32 y, RTRay* ray)const
	{
		//direction of the first (top-left) sub-ray of this pixel
		RTVector3f base(
			m_precompWidth * (x - m_pixelWidth/2.f),
			m_precompHeight * (y - m_pixelHeight/2.f),
			m_fov);

		//a left-handed coordinate system looks down the negative z axis
		if(_handedness == LEFT_HANDED)
			base.z = -base.z;

		//one ray per cell of the oversampling grid; cell (0,0) adds a zero
		//offset and therefore reproduces the base direction itself
		const I32 rayCount = m_overSamplingLevel*m_overSamplingLevel;
		for(I32 index = 0; index<rayCount; index++)
		{
			//sub-pixel grid coordinates of this sample
			I32 cellX = index % m_overSamplingLevel;
			I32 cellY = index / m_overSamplingLevel;

			//shift the base direction by a fraction of a pixel, then normalize
			RTVector3f dir = base + RTVector3f(cellX * m_precompPixelOffsetX, cellY * m_precompPixelOffsetY, 0);
			dir.normalize();

			ray[index].setDirection(dir);
		}
	}

	/*Compute the color of a ray.
	ray             : the (primary or secondary) ray to trace
	color           : output color; left untouched when the ray hits nothing
	reflection      : remaining recursion depth (unused while the reflection
	                  code below is disabled)
	refractionIndex : refraction index of the current medium (unused, see
	                  the disabled transparency code below)*/
	void RTRaytracer::computeColor(const RTRay& ray, RTColor& color, I32 /*reflection*/, F32 /*refractionIndex*/)const
	{

		//distance between the ray's origin and the primitive; -1 means "no hit"
		F32 dist = -1;

		//intersection point between the ray and the primitive
		RTVector3f center;

		//the primitive which intersects with the ray
		RTIPrimitive* p = 0;

		//calculate intersection
		RAYTRACER.PIPELINEgetPrimitive(ray, dist, center, &p);

		//no intersection. Also guard against a hit reported without a primitive:
		//dereferencing a null p below would crash.
		if(dist == -1 || p == 0)
			return;

		//calculate the raw color of the pixel (from texture, material, etc)
		RTColor rawPixelColor;
		RAYTRACER.PIPELINEgetRawColor(*p, center, rawPixelColor);

		//calculate the normal (primitive + bump)
		RTVector3f normal;
		RAYTRACER.PIPELINEgetNormal(*p, ray, center, dist, normal);
	
		//calculate the color using light
		RAYTRACER.PIPELINEcomputeLight(ray.getDirection(), center, normal, *p, rawPixelColor, color);

		//calculate transparency
		//F32 RTPrimitiveRefractionIndex = (F32)p->getMaterial()->m_refractionIndex;
		//RAYTRACER.PIPELINEcomputeTransparency(ray, dist, normal, refractionIndex, RTPrimitiveRefractionIndex, color, color);

		return;

		////transparency (disabled reference implementation)
		//if(p->getMaterial()->m_transparancyCoef != 0 )
		//{
		//	color = color * (1 - p->getMaterial()->m_transparancyCoef);

		//	//get the refracted vector
		//	RTVector3f newDirection = getRefractionVector(normal, ray.getDirection(), 
		//		ray.getOrigin()+ray.getDirection()*dist, refractionIndex, p->getMaterial()->m_refractionIndex);

		//	//get the fresnel coefficient
		//	F32 reflectanceValue = reflectance(normal, ray.getDirection(), refractionIndex, 
		//		p->getMaterial()->m_refractionIndex);
		//	F32 refractionValue = 1 - reflectanceValue;

		//	RTColor secondColor;
		//	if(refractionValue > 0.001)
		//	{
		//		//get the transmitted ray
		//		RTRay transmittedRay(ray.getOrigin()+ray.getDirection()*(dist + 0.01f), newDirection);
		//		RTColor transmittedColor(0, 0, 0);
		//		computeColor(transmittedRay, transmittedColor, reflection, 
		//			(F32)p->getMaterial()->m_refractionIndex, !inside);
		//		secondColor += transmittedColor * refractionValue;
		//	}

		//	if(reflectanceValue > 0.001 && inside == false)
		//	{
		//		RTRay reflectedRay(ray.getOrigin()+ray.getDirection()*(dist + 0.01f), 
		//			getReflectionVector(normal, ray.getDirection()));
		//		RTColor reflectedColor(0, 0, 0);
		//		computeColor(reflectedRay, reflectedColor, reflection, (F32)p->getMaterial()->m_refractionIndex, inside);
		//		secondColor += reflectedColor * reflectanceValue;
		//	}

		//	color += secondColor * p->getMaterial()->m_transparancyCoef;
		//}

		//if(p->getMaterial()->m_reflect == RTColor(1, 1, 1))//mirror
		//	color = RTColor(0, 0, 0);

		////calculate reflection (disabled reference implementation)
		//if(reflection>0 && p->getMaterial()->m_reflect != RTColor(0, 0, 0))
		//{
		//	//get the new ray
		//	RTPoint3f newOrigin = ray.getOrigin()+(ray.getDirection()*dist);
		//	RTVector3f newDirection = getReflectionVector(normal, ray.getDirection());
		//	newOrigin = newOrigin + (newDirection * 0.1f);

		//	//create the reflected ray
		//	RTRay secondRay(newOrigin, newDirection);

		//	//new color
		//	RTColor secondColor(0, 0, 0);
		//	computeColor(secondRay, secondColor, reflection-1, refractionIndex, inside);

		//	//add the new color.
		//	color += secondColor * p->getMaterial()->m_reflect;

		//}
	}

	/*Update the precomputed variables*/
	void RTRaytracer::updateParameters()
	{
		m_precompWidth = m_width/m_pixelWidth;
		m_precompHeight = m_height/m_pixelHeight;

		m_precompPixelOffsetX = m_precompWidth;
		m_precompPixelOffsetY = m_precompHeight;
	}

	/*Default Constructor.
	Defaults: 800x450 pixels, a 16x9 world-space screen, 20 degree field of view,
	no oversampling, RGB output, left-handed coordinates.*/
	RTRaytracer::RTRaytracer():m_origin(0, 0, 0), m_direction(0, 0, 1), m_pixelWidth(800), m_pixelHeight(450),
		m_fov(20), m_height(9), m_width(16), m_overSamplingLevel(NO_OVERSAMPLING), _colorSwapped(false),
		_stopRendering(false), _handedness(LEFT_HANDED)
	{
		//BUG FIX: width/height were swapped (m_width was 9, m_height 16) which
		//distorted the 800x450 (16:9) default image with non-square pixels.

		//BUG FIX: the original declared a local `F32 m_fov` that shadowed the
		//member, so the member kept the raw fov in degrees instead of the focal
		//distance used by generateRay(). Assign the member directly:
		//convert fov to radians, halve it, then derive the distance from the
		//eye to the projection plane.
		m_fov = (3.1416f * 20) / 180;
		m_fov /= 2;
		m_fov = (m_width / 2) / tan(m_fov);

		updateParameters();

	}

	/*Constructor.
	pixelWidth/pixelHeight : resolution of the output picture in pixels
	width/height           : world-space size of the projection screen
	fov                    : field of view in degrees*/
	RTRaytracer::RTRaytracer(I32 pixelWidth, I32 pixelHeight, F32 width, F32 height, F32 fov)
		:m_origin(0, 0, 0), m_direction(0, 0, 1), m_pixelWidth(pixelWidth), m_pixelHeight(pixelHeight),
		m_fov(fov), m_height(height), m_width(width), m_overSamplingLevel(NO_OVERSAMPLING), _handedness(LEFT_HANDED)
	{
		//BUG FIX: the original declared a local `F32 m_fov` that shadowed the
		//member, so the member kept the raw fov in degrees instead of the focal
		//distance used by generateRay(). Assign the member directly:
		//convert fov in rad, halve it, then derive the distance from the eye
		//to the projection plane.
		m_fov = (3.1416f * fov) / 180;
		m_fov /= 2;
		m_fov = (width / 2) / tan(m_fov);

		updateParameters();

	}

	/*Destructor. Nothing to release: the raytracer owns no dynamic resources;
	the singleton pointer itself is freed by close().*/
	RTRaytracer::~RTRaytracer(){}

	//Singleton accessor: lazily create the unique instance on first access.
	//NOTE(review): not thread-safe — confirm rendering setup is single-threaded.
	RTRaytracer& RTRaytracer::getInstance()
	{
		if(!m_instance)
			m_instance = new RTRaytracer;

		return *m_instance;
	}

	/*Destroy the singleton instance (if any) and reset the pointer so a
	later getInstance() call can recreate it.*/
	void RTRaytracer::close()
	{
		//deleting a null pointer is a no-op, so no guard is needed
		delete m_instance;
		m_instance = 0;
	}

	//prepare the raytracer for rendering:
	//clear the stop flag so a previously aborted draw() can run again
	void RTRaytracer::preDraw()
	{
		_stopRendering = false;
	}

	/*Render the scene*/
	void RTRaytracer::draw(char* screenBuffer, I32 reflection)const
	{
		//apply the view matrix to the scene
		RTSCENE.applyViewMatrix(m_viewMatrix);

		const I32 nbColors = 3;
		I32 numberOfRays = m_overSamplingLevel*m_overSamplingLevel;
		RTRay* ray = 0;
		ray = new RTRay[numberOfRays];

		for(I32 i = 0; i<numberOfRays; i++)
			ray[i].setOrigin(m_origin);
	
		I32 numberOfLine=0;
		for(I32 j = m_pixelHeight-1; j>= 0; j--) //start at the last line j = Y
		{
			for(I32 i = 0; i<m_pixelWidth; i++) //i = X
			{
	#ifdef _DEBUG
				//START
				//to breakpoI32 at a specific pixel
				I32 x = 168;
				I32 y = 163;
				I32 yR = m_pixelHeight - y - 1;

				//if(j == yR &&  i == x)
					//I32 a = 0;
			
				//END
	#endif
				RTColor color;

				generateOverSamplingRay(i, j, ray);//generate the rays
				F32 ratio = 1.f/(numberOfRays);
				for(I32 k = 0; k<numberOfRays; k++)
				{
					//compute color
					RTColor tempColor;

					//calculate the pixel color
					computeColor(ray[k], tempColor, reflection, 1);

					//color average
					color += tempColor * ratio;
				}
			

				//saturate color (between 0 and 1)
				color.saturate();
				//maximize (between 0 and 255)
				color.maximize(255);
			
				//set the color in the picture
				if (_colorSwapped) //GBR
				{
					screenBuffer[nbColors * (numberOfLine*m_pixelWidth + i)] = unsigned char(color.RTVEC4Z);
					screenBuffer[nbColors * (numberOfLine*m_pixelWidth + i)+2] = unsigned char(color.RTVEC4X);
				}
				else //RGB
				{
					screenBuffer[nbColors * (numberOfLine*m_pixelWidth + i)] = unsigned char(color.RTVEC4X);
					screenBuffer[nbColors * (numberOfLine*m_pixelWidth + i)+2] = unsigned char(color.RTVEC4Z);
				}

				screenBuffer[nbColors * (numberOfLine*m_pixelWidth + i)+1] = unsigned char(color.RTVEC4Y);

				//stopped signal
				if (_stopRendering)
				{
					delete[] ray;
					return;
				}
			}
			numberOfLine++;

			//calculate progress
			F32 percent = (numberOfLine+1)/(F32)m_pixelHeight*100;

			//only 2 number after the poI32
			I32 t = I32(percent*100);
			percent = (F32)t/100;
			if(percent>100)
				percent = 100;

			//display
			std::cout<<"\r"<<"PROGRESS : "<<percent<<" %   ";

			/*VISU.preRender();
			VISU.doRender();
			VISU.postRender();*/
		
		}

		delete[] ray;
	}

	/*Set camera position, direction and up vector.
	eye : camera position
	at  : point the camera looks at
	up  : approximate up direction (used to build an orthonormal basis)
	Builds the look-at view matrix stored in m_viewMatrix. The basis vectors go
	into the matrix columns with the translation in the last row, i.e. the
	row-vector convention (v * M) used elsewhere in this file.*/
	void RTRaytracer::setViewer(const RTVector3f& eye, const RTVector3f& at, const RTVector3f& up)
	{
		//calculate the view matrix: forward axis first
		RTVector3f zaxis = at - eye; zaxis.normalize(); 

		//if left_handed coordinate system, reverse the z axis
		if(_handedness == LEFT_HANDED)
			zaxis = -zaxis;

		//right and up axes complete the orthonormal basis
		RTVector3f xaxis = up.cross(zaxis); xaxis.normalize();
		RTVector3f yaxis = zaxis.cross(xaxis); yaxis.normalize();

		m_viewMatrix.identity();

		//rotation part: basis vectors laid out as columns
		m_viewMatrix.m_array[0][0] = xaxis.x;
		m_viewMatrix.m_array[0][1] = yaxis.x;
		m_viewMatrix.m_array[0][2] = zaxis.x;

		m_viewMatrix.m_array[1][0] = xaxis.y;
		m_viewMatrix.m_array[1][1] = yaxis.y;
		m_viewMatrix.m_array[1][2] = zaxis.y;

		m_viewMatrix.m_array[2][0] = xaxis.z;
		m_viewMatrix.m_array[2][1] = yaxis.z;
		m_viewMatrix.m_array[2][2] = zaxis.z;

		//translation part: project the eye position onto each axis
		m_viewMatrix.m_array[3][0] = -xaxis.dot(eye);
		m_viewMatrix.m_array[3][1] = -yaxis.dot(eye);
		m_viewMatrix.m_array[3][2] = -zaxis.dot(eye);	
	}

	/*Set picture resolution in pixels and refresh the per-pixel precomputed
	values that depend on it.*/
	void RTRaytracer::setResolution(I32 pixelWidth, I32 pixelHeight)
	{
		m_pixelWidth = pixelWidth;
		m_pixelHeight = pixelHeight;

		updateParameters();
	}

	/*Set screen ratio and field of view.
	ratioWidth/ratioHeight : world-space size of the projection screen
	fov                    : field of view in degrees*/
	void RTRaytracer::setScreen(F32 ratioWidth, F32 ratioHeight, F32 fov)
	{
		m_height = ratioHeight;
		m_width = ratioWidth;
	
		//BUG FIX: the original declared a local `F32 m_fov` that shadowed the
		//member, so the new fov never took effect. Assign the member directly:
		//convert fov in rad, halve it, then derive the distance from the eye
		//to the projection plane.
		m_fov = (3.1416f * fov) / 180;
		m_fov /= 2;
		m_fov = (m_width / 2) / tan(m_fov);

		updateParameters();
	}

	/*Set oversampling level used to render the scene.
	Also refreshes the sub-pixel offsets so each of the lvl_ovs*lvl_ovs rays
	per pixel is shifted by a fraction of the pixel size.
	NOTE(review): assumes lvl_ovs >= 1 (NO_OVERSAMPLING) — a zero value would
	divide by zero here; confirm the OVERSAMPLING enum values.*/
	void RTRaytracer::setOverSampling(OVERSAMPLING lvl_ovs)
	{
		m_overSamplingLevel = lvl_ovs;
		m_precompPixelOffsetX = m_precompWidth / m_overSamplingLevel;
		m_precompPixelOffsetY = m_precompHeight / m_overSamplingLevel;

	}

	/*Get the pixel width of the picture*/
	I32 RTRaytracer::getPixelWidth()const{return m_pixelWidth;}

	/*Get the pixel height of the picture*/
	I32 RTRaytracer::getPixelHeight()const{return m_pixelHeight;}

	/*Get a copy of the scene's view matrix (returned by value)*/
	RTMatrix44 RTRaytracer::getViewMatrix()const{return m_viewMatrix;}

	/*Return the refraction (transmitted) vector computed with Snell's law.
	normal   : surface normal at the hit point (assumed unit length, facing
	           the incident ray — TODO confirm with callers)
	incident : incoming ray direction (assumed unit length)
	hit      : unused; kept for interface compatibility (see the commented
	           alternative formula below)
	n1, n2   : refraction indices of the incident and transmitted media
	Returns a zero vector on total internal reflection (sin^2(theta_t) > 1).*/
	RTVector3f RTRaytracer::getRefractionVector(const RTVector3f& normal, const RTVector3f& incident, const RTVector3f& /*hit*/,
											  F64 n1, F64 n2)const
	{
		const F64 n = n1 / n2;
		//cosine of the incident angle (normal faces against the incident ray)
		const F64 cosI = -normal.dot(incident);
		//Snell: sin^2(theta_t) = (n1/n2)^2 * sin^2(theta_i)
		F64 sinT2 = n * n * (1.0 - cosI * cosI);
		if (sinT2 > 1.0)
			return RTVector3f(0, 0, 0);

		//T = n*I + (n*cosI - cos(theta_t)) * N
		//RTVector3f T=incident*n+hit*(n*cosI-sqrt(1.0-sinT2));
		RTVector3f T=incident*(F32)n+normal*((F32)n*(F32)cosI-sqrt(1.f-(F32)sinT2));
		T.normalize();
		return T;
	}

	/*Return the normalized reflection of the incident vector about the normal:
	R = I - 2 (N.I) N, renormalized to guard against drift.*/
	RTVector3f RTRaytracer::getReflectionVector(const RTVector3f& normal, const RTVector3f& incident)const
	{
		const F64 nDotI = normal.dot(incident);
		RTVector3f reflected = incident - (normal * (F32)nDotI * 2);
		reflected.normalize();
		return reflected;
	}

	/*Calculate the reflectance (Fresnel factor) when going through an object,
	using Schlick's approximation: R = R0 + (1 - R0)(1 - cos)^5.
	normal   : surface normal at the hit point (assumed unit length)
	incident : incoming ray direction (assumed unit length)
	n1, n2   : refraction indices of the incident and transmitted media
	Returns a value in [0, 1]; 1 on total internal reflection.*/
	F32 RTRaytracer::reflectance(const RTVector3f& normal, const RTVector3f& incident, F64 n1, F64 n2)const
	{
		//R0: reflectance at normal incidence
		F64 r0 = (n1 - n2) / (n1 + n2);
		r0 *= r0;
		F64 cosI = -normal.dot(incident);
		if(n1 > n2)
		{
			//going into a less dense medium: use the transmitted angle instead,
			//and check for total internal reflection
			const F64 n = n1 / n2;
			const F64 sinT2 = n * n * (1 - cosI * cosI);
			if(sinT2 > 1) return 1;
			cosI = sqrt(1 - sinT2);
		}
		const F64 x = 1 - cosI;
		//(1 - cos)^5 term of Schlick's approximation
		return (F32)(r0 + (1 - r0) * x * x * x * x * x); 
	} 

	//Pipeline rendering function returning the intersected primitive.
	//primaryRay  : ray to test against the scene
	//dist        : out, distance to the closest hit; forced to -1 when nothing
	//              is hit (the sentinel computeColor() tests)
	//center      : out, intersection point (only valid on a hit)
	//RTPrimitive : out, pointer to the hit primitive (untouched when no hit)
	inline void RTRaytracer::PIPELINEgetPrimitive(const RTRay& primaryRay, F32& dist, RTPoint3f& center, RTIPrimitive** RTPrimitive)
	{
		//ask to the scene for the intersection
		I32 index = RTSCENE.getFirstCollision(primaryRay, dist);
		if(index<0) // no intersection
		{
			//guarantee the "no hit" sentinel even if getFirstCollision wrote
			//a partial distance before rejecting every primitive
			dist = -1;
			return;
		}
		//get the primitive
		*RTPrimitive = RTSCENE.getPrimitive(index);

		//calculate the intersection point
		center = primaryRay.getOrigin()+primaryRay.getDirection()*dist;
	}

	//Pipeline rendering function returning the raw pixel color.
	inline void RTRaytracer::PIPELINEgetRawColor(const RTIPrimitive& primitive, const RTVector3f& center, RTColor& output)
	{
		//calculate the intersection poI32 (center) I32o local RTPrimitive coordinates.
		RTVector3f I32erLC = center * primitive.getInverseWorldViewMatrix();

		//ask to the material for the diffuse component (the raw clolor of the pixel)
		output = primitive.getMaterial()->getDiffuse(I32erLC, primitive);
	}

	//Pipeline rendering function calculating the surface normal at the hit
	//point, perturbed by the material's bump map and renormalized.
	//primitive  : the primitive that was hit
	//primaryRay : the ray that hit it
	//center     : intersection point
	//dist       : distance from the ray origin to the hit
	//output     : out, resulting unit-length normal
	inline void RTRaytracer::PIPELINEgetNormal(const RTIPrimitive& primitive, const RTRay& primaryRay, const RTPoint3f& center, const F32& dist, 
		RTVector3f& output)
	{
		//Calculate the geometric normal
		//RTNormal3f normal;
		primitive.computeNormal(primaryRay, dist, output);

		//add the bump map perturbation
		//NOTE(review): presumably getBumpNormal returns a zero vector when the
		//material has no bump map — confirm in RTMaterial
		RTVector3f bumpNormal = primitive.getMaterial()->getBumpNormal(center);
		output += bumpNormal;
		output.normalize();
	}

	//Pipeline rendering function calculating the color of a pixel using light.
	//Applies, per light and when the hit point is not in shadow, a Lambert
	//diffuse term and a Phong specular term, then adds the global ambient term.
	//direction : view ray direction (pointing from the eye into the scene)
	//center    : hit point (view coordinates)
	//normal    : unit surface normal at the hit point
	//primitive : the primitive that was hit
	//input     : raw (diffuse) color of the pixel
	//output    : out, final saturated color
	inline void RTRaytracer::PIPELINEcomputeLight(const RTVector3f& direction, const RTPoint3f& center, const RTVector3f& normal, 
		const RTIPrimitive& primitive, const RTColor& input, RTColor& output)
	{
		RTColor t_color;
		//get material
		RTMaterial* caracteristics = primitive.getMaterial();

		//go through all the lights
		U32 nbOfLights = RTSCENE.getNumberOfLights();
		for(U32 lightCount = 0; lightCount < nbOfLights; lightCount ++)
		{
			//get the light
			RTILight* ptrLight = RTSCENE.getLight(lightCount);
	
			//compute the shadow ray (from the hit point toward the light)
			RTRay shadowRay;
			F32 lightDistance = -1;
			ptrLight->computeShadowRay(center, shadowRay, &lightDistance);
		
			//check if there is something between the object and the light
			if(!RTSCENE.testCollision(shadowRay, lightDistance)) //no collision
			{
				//get the light color and light direction vector
				RTColor lightColor = ptrLight->computeColor(shadowRay, lightDistance);
				RTVector3f L = shadowRay.getDirection();

				//LAMBERT: diffuse term proportional to cos(angle(L, N)),
				//only when the light is on the outer side of the surface
				F32 LdotN = L.dot(normal); 
				if(LdotN > 0.f)
				{
					//t_color += lightColor * caracteristics->getDiffuse(I32erLC, p)*LdotN;
					t_color += lightColor * input * LdotN;
				
					//PHONG: specular highlight. lightReflection is the negated
					//reflection of L about N, so its dot with the view direction
					//equals the classic (R.V) term.
					RTVector3f lightReflection = L - (normal * 2.0f * LdotN);
					F32 dot = direction.dot(lightReflection); 
					if(dot > 0.f)
						t_color += lightColor * powf(dot, caracteristics->m_shininess)*caracteristics->getSpecular(center, primitive);

					//BLINN PHONG (disabled alternative)
					/*RTVector3f H = L + view;
					H.normalize();
					F32 NdotH = normal.dot(H);
					if(NdotH > 0.f)
						t_color += (*i)->getColor()*powf(NdotH, caracteristics.m_shininess)*caracteristics.m_specular;*/
				}
			}
		}
	
		//AMBIENT
		//if(caracteristics->useBumpMapping())
		//	t_color += normal*caracteristics->getAmbient(I32erLC, RTPrimitive)*SCENE.getGlobalAmbientLight();
		//else

		//center is in view coordinates; bring it into the primitive's local
		//space before sampling the ambient component
		RTVector3f I32erLC = center * primitive.getInverseWorldViewMatrix();
		t_color += caracteristics->getAmbient(I32erLC, primitive)*RTSCENE.getGlobalAmbientLight();

		//saturate (between 0 and 1)
		t_color.saturate();

		output = t_color;
	}

	//Pipeline rendering function calculating the transparency color.
	//Blends inputColor with the color seen through the surface, weighted by
	//the alpha channel (RTVEC4W) and the Fresnel transmission factor.
	//incidentRay         : the ray that hit the surface
	//dist                : distance from the ray origin to the hit
	//normal              : unit surface normal at the hit point
	//incidentRefraction  : refraction index of the medium the ray travels in
	//reflectedRefraction : refraction index of the material entered
	//inputColor/outputColor : surface color in, blended color out
	inline void RTRaytracer::PIPELINEcomputeTransparency(const RTRay& incidentRay, F32 dist,
		const RTNormal3f& normal, F32 incidentRefraction, F32 reflectedRefraction,
		const RTColor& inputColor, RTColor& outputColor)
	{
		//fully opaque: nothing to blend
		if (inputColor.RTVEC4W == 1)
		{
			outputColor = inputColor; 
			return;
		}

		//calculate the intersection point, and a point slightly past it to
		//avoid self-intersection of the transmitted ray
		RTVector3f center = incidentRay.getOrigin() + incidentRay.getDirection() * dist;
		RTVector3f newCenter = incidentRay.getOrigin() + incidentRay.getDirection() * (dist + 0.01f);

		//get the refracted vector
		RTVector3f newDirection = getRefractionVector(normal, incidentRay.getDirection(), center, incidentRefraction, reflectedRefraction);

		//reflectance : how much is reflected.
		//BUG FIX: the original passed constant indices (1, 1), which zeroed R0
		//and ignored the actual media; use the indices given as parameters.
		F32 reflectanceValue = reflectance(normal, incidentRay.getDirection(), incidentRefraction, reflectedRefraction);

		//refraction : how much goes through
		F32 refractionValue = 1 - reflectanceValue;


		RTColor secondColor;
		if(refractionValue > 0.001) //calculate refraction (transmitted part)
		{
			//get the transmitted ray
			RTRay transmittedRay(newCenter, newDirection);
			RTColor transmittedColor;
			computeColor(transmittedRay, transmittedColor, 0, reflectedRefraction);
			secondColor += transmittedColor * refractionValue;
		}

		//if(reflectanceValue > 0.001) // && !inside
		//{
		//	RTRay reflectedRay(newCenter, getReflectionVector(normal, incidentRay.getDirection()));
		//	RTColor reflectedColor;
		//	computeColor(reflectedRay, reflectedColor, 0, reflectedRefraction);
		//	secondColor += reflectedColor * reflectanceValue;
		//}

		//calculate final color: alpha blend surface color with what lies behind
		outputColor = inputColor * inputColor.RTVEC4W + secondColor * (1 - inputColor.RTVEC4W);
	}

	//set the color swapped flag used by draw().
	//If true, pixel bytes are written in BGR order; if false, RGB.
	void RTRaytracer::setColorSwapped(bool colorSwapped)
	{
		_colorSwapped = colorSwapped;
	}

	//set the stop signal for the rendering loop:
	//draw() checks this flag after each pixel and returns early when true.
	//Call preDraw() to clear it before the next render.
	void RTRaytracer::setStopSignal(bool stop)
	{
		_stopRendering = stop;
	}

	//set the coordinate system (use LEFT_HANDED or RIGHT_HANDED).
	//LEFT_HANDED makes ray generation and setViewer() flip the z axis.
	void RTRaytracer::setCoordinateSystem(bool handedness)
	{
		_handedness = handedness;
	}

}