//
//  Shader.fsh
//  Renderbunny
//
//  Created by Holmes Futrell on 1/20/11.
//  Copyright 2011 Holmes Futrell. All rights reserved.
//

// G-buffer inputs (one texel per screen pixel)
uniform sampler2D depthSampler;     // window-space depth in .r, used to reconstruct eye-space position
uniform sampler2D normalSampler;    // normal encoded into [0,1]; decoded as 2*n-1 in main()
uniform sampler2D diffuseSampler;   // per-pixel diffuse color, used for the bounce term
uniform sampler2D albedoSampler;    // NOTE(review): not referenced in this shader — confirm it is still needed
uniform sampler2D lightmapSampler;  // environment lighting image, indexed via normalToImageLighting()


uniform vec2 invImageSize;          // NOTE(review): not referenced in this shader

// NOTE(review): both matrices are only used in commented-out code below
uniform mat4 projectionMatrix;
uniform mat4 modelviewMatrix;

// interpolated texture coordinate of this fragment, in [0,1]
varying vec2 uv;

// sampling hemisphere radius (eye-space units)
uniform float rmax;

// projection constants used by unproject()/projectEyeToScreen();
// presumably derived from the projection matrix on the host — TODO confirm
uniform float Q1, Q2, C, D;
uniform int numSamples;             // number of hemisphere samples taken per fragment


const float PI = 3.14159265;
const float epsilon = 0.000;        // NOTE(review): zero and unused — dead constant?

// dimensions of the repeating sample-pattern tile, and the texture holding
// the precomputed per-pixel sample directions
uniform vec2 samplePatternSize;
uniform sampler2D samplePatternDirections;

// Reconstruct the eye-space position of a point given its window-space
// coordinates v = (x, y, depth), all in [0,1].
vec3 unproject(vec3 v) {
	// remap window coordinates [0,1] to normalized device coordinates [-1,1]
	vec3 ndc = 2.0 * v - 1.0;
	vec3 eye;
	// invert the depth mapping ndc.z = -C - D/eye.z
	eye.z = -D / ( ndc.z + C );
	// invert the perspective divide ndc.xy = -Q*eye.xy/eye.z
	eye.x = ndc.x * eye.z * ( -1.0 / Q1 );
	eye.y = ndc.y * eye.z * ( -1.0 / Q2 );
	return eye;
}

// Map a unit normal to [0,1]² lookup coordinates in the lighting image:
// angle-proportional radial mapping, r = acos(n.z)/PI along direction n.xy.
// The caller passes a normalized vector; n.z is clamped anyway because fp
// error can push it slightly past ±1, which would make acos() return NaN.
vec2 normalToImageLighting(vec3 normal) {

	// squared length of the xy projection; at the ±z poles this is ~0 and
	// inversesqrt() would blow up, so handle that case explicitly
	float xyLen2 = normal.x * normal.x + normal.y * normal.y;
	if ( xyLen2 < 1e-12 ) {
		// +z maps to the image center in the limit; for -z the direction is
		// undefined, so the center is used there as a safe fallback too
		return vec2(0.5, 0.5);
	}

	float r = 1.0 / PI * acos( clamp(normal.z, -1.0, 1.0) ) * inversesqrt( xyLen2 );

	// image coordinates in range [0,1]
	// (local renamed from `uv` to avoid shadowing the varying of that name)
	vec2 st;
	st.x = 0.5 * ( normal.x * r ) + 0.5;
	st.y = 0.5 * ( normal.y * r ) + 0.5;
	return st;

}

// Project an eye-space point into window space: perspective-divide into
// normalized device coordinates, then remap [-1,1] to [0,1].
// Inverse of unproject().
vec3 projectEyeToScreen(vec3 eye) {

	vec3 ndc = vec3( -Q1 * eye.x / eye.z,
	                 -Q2 * eye.y / eye.z,
	                 -C - D / eye.z );
	return 0.5 * ndc + 0.5;
}


// Per-fragment hemisphere-sampling pass: for each of numSamples precomputed
// directions, decide whether the sample sees the environment (lightmap term)
// or nearby geometry (single-bounce diffuse term), and accumulate.
void main()
{

	float depth = texture2D(depthSampler, uv).r;

	// decode the G-buffer normal from [0,1] back to a unit vector
	vec3 normal = texture2D(normalSampler, uv).rgb;
	normal = normalize( 2.0 * normal - 1.0 );
	//normal = normalize( vec3(modelviewMatrix * vec4(normal, 0.0)) );

	// eye-space position of this fragment
	vec3 position = unproject( vec3(uv, depth) );

	// build an orthonormal basis (basis1, basis2, normal): start from the
	// coordinate axis least aligned with the normal, then Gram-Schmidt
	vec3 basis1, basis2;
	vec3 absNormal = abs(normal);
	if ( absNormal.x <= absNormal.y && absNormal.x <= absNormal.z ) {
		basis1 = vec3(1.0, 0.0, 0.0);
	}
	else if ( absNormal.y <= absNormal.z ) {
		basis1 = vec3(0.0, 1.0, 0.0);
	}
	else {
		basis1 = vec3(0.0, 0.0, 1.0);
	}
	basis1 -= dot(basis1, normal) * normal;
	basis1 = normalize(basis1);
	basis2 = normalize(cross( normal, basis1 ));

	float nFactor = 1.0 / float(numSamples);

	// Loop-invariant work hoisted out of the sample loop:
	// the environment lighting lookup depends only on this fragment's normal.
	vec3 lightingSample = texture2D(lightmapSampler, normalToImageLighting(normal)).rgb;
	// we don't multiply by pi here because we don't divide
	// by pi in the BRDF
	float As = rmax * rmax * nFactor;
	vec3 lmap = nFactor * lightingSample;

	vec3 color = vec3(0.0, 0.0, 0.0);

	for (int i=0; i<numSamples; i++) {

		// address of the i-th direction for this pixel within the repeating
		// sample-pattern tile (tiles are laid out side by side along x)
		vec2 f = mod( gl_FragCoord.xy, samplePatternSize) + vec2(samplePatternSize.x * float(i), 0.0);
		f.x		/= samplePatternSize.x * float(numSamples);
		f.y		/= samplePatternSize.y;
		vec3 sDir = texture2D(samplePatternDirections, f).rgb;

		// sample point on the hemisphere of radius rmax around the fragment
		vec3 sphereCoords = rmax * sDir.x * basis1 +
							rmax * sDir.y * basis2 +
							rmax * sDir.z * normal +
							position;

		vec3 uvPrime = projectEyeToScreen(sphereCoords);

		float sampleDepth = texture2D(depthSampler, uvPrime.xy).r;
		vec3 diffuseSample	= texture2D(diffuseSampler,		uvPrime.xy).rgb;

		// eye-space point actually visible at the sample's screen location,
		// and its squared distance from this fragment; the max() guards the
		// division below when the sample resolves to the fragment itself
		vec3 p = unproject( vec3(uvPrime.xy, sampleDepth) );
		vec3 delta = p - position;
		float d_squared = max( dot( delta, delta ), 1e-6 );

		// unoccluded (sample in front of stored depth) -> full lightmap term;
		// occluded -> fade back toward the lightmap with squared distance
		float visibility = uvPrime.z < sampleDepth ? 1.0 : min(d_squared / (64.0 * rmax * rmax), 1.0);

		// single-bounce contribution from the occluding surface
		vec3 bounce = (As / d_squared) * diffuseSample;

		color += mix( bounce, lmap, visibility);

	}

	gl_FragColor = vec4(color, 1.0);

}