#version 460
#extension GL_GOOGLE_include_directive : enable
#extension GL_EXT_nonuniform_qualifier : enable
#extension GL_EXT_shader_explicit_arithmetic_types_int64 : require
#extension GL_EXT_shader_explicit_arithmetic_types_int16 : require
#extension GL_EXT_shader_explicit_arithmetic_types_int8 : require


#include "../../../../core/shader_binding_ID.h"
#include "../../../../core/shader_std.h"
#include "../../../../core/shader_built_in_functions.h"

//#include "PBR_MaterialAtt.glsl"


layout(location=0) out vec4 outColor;
//layout(location=1) out vec4 outVert;
//layout(location=2) out vec4 outNormal;

// Per-widget 2D textures (set 1), indexed with a runtime index.
layout(set = 1, binding = DEF_BINDING_WIDGET_Texture) uniform sampler2D texColor[];

// Scene-level samplers (set 2): environment cube maps, shadow maps, G-buffer images.
layout(set = 2, binding = DEF_BindID_TextureCube) uniform samplerCube texEnv[];
layout(set = 2, binding = DEF_BindID_TextureShadow) uniform sampler2D texShadow[];
layout(set = 2, binding = DEF_BindID_TexCubeShadow) uniform samplerCube texCubeShadow[];
layout(set = 2, binding = DEF_BindID_GBufferImage)  uniform sampler2D GBuffer_Image[];

layout(set = 2, binding = DEF_BindID_CompositingShadow) uniform sampler2D compositing_shadow[];


// Global constants for this compositing pass.
// NOTE(review): both std140 and scalar layout qualifiers are specified here;
// only one packing can apply — confirm which layout the host code assumes.
layout(std140, set = 2, scalar, binding = DEF_BindID_GlobalTextAtt) uniform Buf0 {
	S_GlobalAtt_RasterPostCompositing m_constPush;
};

//number of shadow map layers for each light
layout(std430, set = 2, column_major, scalar, binding = DEF_BindID_TexShadow_LightIndex) readonly buffer Buf1 {
	S_LightIndex m_LightIndex[];
};

//light projection(-view) matrices
layout(std430, set = 2, column_major, scalar, binding = DEF_BindID_TexShadow_PVMatrix) readonly buffer Buf3 {
	mat4 m_LightArea_PVMatrix[];
};

//area light attributes
layout(std430, set = 2, column_major, scalar, binding = DEF_BindID_TexShadow_AreaLight) readonly buffer Buf2 {
	S_LightArea m_LightArea[];
};

//material attributes
layout(std430, set = 2, scalar, column_major, binding = DEF_BINDING_TasterBindID_MaterialID) readonly buffer Buf4 {
	S_Material gMaterial[];
};




// Interpolated vertex-stage outputs; view matrix and material id are flat (per-primitive).
layout(location = 0) in vec3 inEyeDir;
layout(location = 1) in vec3 inNormal;
layout(location = 2) in vec4 inVert;
layout(location = 3) in flat mat4 inCameraView;
layout(location = 7) in flat uint inMaterialID;


// Force depth/stencil tests to run before this fragment shader executes.
layout(early_fragment_tests) in;


#include "../光栅渲染/灯光阴影.glsl"
#include "../光栅渲染/纹理模糊计算.glsl"

// Map a 2D parameter to a direction on the upper hemisphere (Y-up).
// The azimuth comes from the angle of (uv - 0.5) around the patch centre,
// shifted by PI into [0, 2*PI); the polar angle uses acos(sqrt(1 - uv.y)),
// which biases samples toward the pole (cosine-style distribution).
vec3 f_ComputeUniformHemisphereSample(vec2 uv) {
	// Azimuthal angle around the up axis.
	float azimuth = atan(uv.y - 0.5, uv.x - 0.5) + M_PI;

	// Polar angle measured from the up axis.
	float polar = acos(sqrt(1.0 - uv.y));

	// Spherical -> Cartesian, with Y as the vertical component.
	return vec3(sin(polar) * cos(azimuth),
	            cos(polar),
	            sin(polar) * sin(azimuth));
}

// Rotate a sphere-space sample `ss` into the local frame of normal N.
// `offsetDir` perturbs the reference up axis, and a tiny f_random jitter keeps
// cross(N, axis) from degenerating when N is (nearly) parallel to the axis.
// Note the component swizzle on input: the sample's y/z are swapped.
vec3 f_SphereSampling_toWorld3(vec3 ss, vec3 N, vec3 offsetDir) {
    // Reference axis for basis construction, jittered to avoid N-parallel cases.
    vec3 axis = normalize(vec3(0, 1, 0) + offsetDir + f_random(ss.xz) * 0.00001);

    // Build an orthonormal basis (tangent, bitangent, N) around the normal.
    vec3 tangent = normalize(cross(N, axis));
    vec3 bitangent = normalize(cross(tangent, N));

    // Transform the swizzled sample into that frame.
    return mat3(tangent, bitangent, N) * vec3(ss.x, ss.z, ss.y);
}

// Linear interpolation between a and b by factor f (same contract as mix()).
float lerp(float a, float b, float f) {
    float delta = b - a;
    return a + f * delta;
}



const vec3 w = vec3(0.2125,0.7154,0.0721);

// Post-compositing fragment shader: combines environment lighting (diffuse and
// reflection probes), a view-dependent specular term, per-light area lighting
// and a shadow attenuation factor `s` into outColor.
// NOTE(review): the function returns unconditionally partway through; all code
// after the first plain `return;` is unreachable debug scaffolding kept during
// development.
void main(void) {
	float stepSample = max(m_constPush.m_resolutionRatio.x, m_constPush.m_resolutionRatio.y);
	//vec2 sampleOffsetSize = vec2(1.0) / stepSample;
	vec2 invResolution = 1.0 / m_constPush.m_resolutionRatio;

	vec2 screenCoords = vec2(gl_FragCoord.xy) / m_constPush.m_resolutionRatio;
	outColor = vec4(0.2,0.0,0.0,1);
	//return;
	vec3 rv = reflect(normalize(-inEyeDir), inNormal);
	
	S_Material mate = gMaterial[inMaterialID];

	vec3 irradiance = vec3(0);
	vec3 albedo = vec3(0.4,0.2,0.1);
	vec3 metallicRoughness = vec3(0.1);
	vec3 F0 = vec3(0.04);

	//tangent normal
	vec3 N = normalize(inNormal);
	vec3 R = reflect(normalize(inEyeDir), N);
	float VD = 1 / length(inEyeDir);

	//environment textures: texEnv[0] sampled by normal (diffuse), texEnv[1] by reflection vector (specular)
	vec4 env = texture(texEnv[0], N, 0);
	irradiance.rgb = env.rgb;
	
	
	vec4 env_s = texture(texEnv[1], rv, 0);
	irradiance += vec3(env_s.rgb * env_s.a) * mate.m_reflect;

	//view-direction specular highlight
	float vs = max(dot(normalize(-inEyeDir), inNormal), 0);
	vs = pow(vs, 60)* VD;
	irradiance += mate.m_specular * vs;
	irradiance *= m_constPush.m_envIntensity;


	vec3 lightColor = vec3(0.001);
	float s = 0;
	float a = 0;
	float contactShadow = 0;
	float maxDis = 0;
	
	// Accumulate the contribution of each light assigned to this pass.
	for(uint i = 0; i < uint(m_constPush.light_Num); ++i){
		S_LightIndex light = m_LightIndex[m_constPush.light_Offset + i];

		switch(uint(light.m_lightType)){
			case DEF_ObType_Ligth_Parallel : {
				break;
			}
			case DEF_ObType_Ligth_Point : {
				break;
			}
			case DEF_ObType_Ligth_Area : {
				S_LightArea area = m_LightArea[light.m_attIndex];
				mat4 projMatrix0 = m_LightArea_PVMatrix[light.m_matrixIndex];
				vec4 shadowVert = projMatrix0 * inVert;
				
				
				/************** Shadow computation *************/
				//maxDis = f_light_contactShadow(invResolution, inCameraView, area);
				//break;
				//a = f_light_blurShadow(texShadow[light.m_shadowTexIndex], invResolution, area, light, maxDis);
				//contactShadow = texture(texShadow[light.m_shadowTexIndex], screenCoords.xy).r;
				
				/******************** Light computation ******************/
				//shadowVert.xy += 1;
				//shadowVert.xy *= 0.5;
				//float z = texture(texShadow[light.m_shadowTexIndex], shadowVert.xy).r;
				//shadowVert.z -= 0.0001;
				
				//float ld = max(dot(N, -area.m_dir), 0) * area.m_intensity;
				//if(r < 0.008) r += 1.1;
				//float r = area.m_diffusivity * 0.01;
				//float m = 0;
				
				//m *= 0.04;
				//s -= min(m, 1);
				//s += area.m_diffusivity;
				//s += area.m_dis;
				//lightAreaIt = lightAreaIt * 2 + 1;
				//s += min( a * a * a / (pow(lightAreaIt, 1)), 1) * 20;
				//s += a;
				//s = texture(texShadow[light.m_shadowTexIndex], vec2(gl_FragCoord.xy) * invResolution).r * 10;
				
				//s += (1 - smoothstep(0.001, 0.05, m / 0.05)) * 0.5;
				vec3 L = (area.m_pos - inVert.xyz);
				float ld = max(dot(N, -area.m_dir), 0) * max(dot(normalize(L), -area.m_dir), 0);
				//float ld = max(dot(normalize(L), -area.m_dir), 0);

				vec3 lightIntensity = area.m_color;
				//lightColor += vec3(ld) * area.m_intensity;
				
				
				// NOTE(review): clamp(0, area.m_dis, length(L)) has swapped arguments —
				// GLSL clamp(x, minVal, maxVal) is undefined when minVal > maxVal.
				// Likely intended clamp(length(L), 0, area.m_dis).
				float LI = 1 - smoothstep(0, 1, clamp(0, area.m_dis, length(L)) / area.m_dis);
				lightIntensity *= vec3(LI) * pow(ld, 3) * area.m_intensity;
				lightIntensity += pow(max(dot(R, normalize(L)), 0) * ld, 10) * (1-mate.m_roughness);
				
				lightColor += lightIntensity;
				
				
				//lightColor = max(lightColor, vec3(0.1));
				//float b = step(0.0001, s.r);
				//float c = 1 - sign(a);
				break;
			}
			case DEF_ObType_Ligth_Spot : {
				break;
			}
		}
		//outColor.xyz = inVert.xyz;
		s = max(s, a);
	}

	//outColor.xyz = vec3(maxDis);
	//return;

	//return;
	//shadow accumulation: average, scale by intensity, invert and re-contrast
	s /= float(m_constPush.light_Num);
	s *= m_constPush.m_shadowIntensity;
	s = 1 - clamp(s, 0, 1);
	s = (s - 0.3) * 1.4 + 0.3;
	
	

	

	
	float sampleDelta = 0.5;
	int nrSamples = 0; 
	

	


	//vec2 sampleOffset = gl_SamplePosition.xy * invResolution * 2;
	//vec4 OriPoint = inCameraView * vec4(inVert.xyz, 0);
	float AO = 0;
	//float AO = f_ssao(inCameraView, invResolution, screenCoords);

	const float desiredFovY = radians(mate.m_roughness * 180);
	//const int ao_X_ItNum = (m_constPush.m_AONum >> 16);
	//const int ao_Y_ItNum = (m_constPush.m_AONum & 0x0000ffff);
	const vec2 spaceSampleOffset = vec2(1.0f/(m_constPush.m_AO_X_Sample>>2), float(1.0 / m_constPush.m_AO_Y_Sample));

	//vec4 coVert = texture(GBuffer_Image[DEF_GBuffer_Vert], gl_FragCoord.xy * invResolution);
	//vec2 currentUV = gl_FragCoord.xy * invResolution;
	float courrentPointZ = (inCameraView * vec4(inVert.xyz, 0)).z;
	//float zInv = 1 / m_constPush.m_radiusAO;
	float ao_tInv = 1 / m_constPush.m_thicknessAO;
	// NOTE(review): this local `w` shadows the file-scope `const vec3 w` (luma weights).
	float w = gl_FragCoord.z * gl_FragCoord.w * 2 + 1;

	

	//s = 1 - clamp(s, 0, 1);
	//s = min(s, AO);
	float c = s * AO;
	//AO *= s;
	
	//irradiance *= s;
	//float d1 = dot(w, irradiance);
	//float d2 = dot(w, lightColor);
	//irradiance = vec3(0.3);
	//c += 0.1;
	
	//outColor.xyz = ((irradiance * (1-mate.m_reflect) + mate.m_color) * lightColor) * s;
	outColor.xyz = (irradiance + (mate.m_color * (1 - mate.m_reflect) * mate.m_roughness) + lightColor) * s;
	////outColor.xyz = irradiance * s + lightColor;
	//outColor.xyz = irradiance;
	return;

	// ---- Everything below this point is unreachable debug code. ----
	//outColor.xyz = vec3(AO);
	
	if(gl_FragCoord.x > m_constPush.m_resolutionRatio.x/2){
		outColor.xyz = vec3(gl_FragCoord.z * gl_FragCoord.w);
	}else{

	}
	return;
	//outColor.xyz = vec3(s);
	//return;
	//irradiance *= vec3(s);

	//contactShadow = texture(texShadow[0], screenCoords.xy).r;
	//outColor.xyz = vec3(contactShadow);
	//return;
	//outColor.xyz *= vec3(1 - min(AO * m_constPush.m_intensityAO, 1));
	
	float ao = gaussianBlurR(compositing_shadow[0], screenCoords, invResolution, 4);
	//float contactShadow = gaussianBlurG(compositing_shadow[0], screenCoords, invResolution, 1);

	vec4 indirectVert = texture(GBuffer_Image[DEF_GBuffer_Vert], screenCoords);
	outColor.xyz = vec3(indirectVert);
	return;

	outColor.xyz = vec3(contactShadow);
	//return;
	
	//float rz = ((inCameraView * vec4(inVert.xyz, 0))).z * gl_FragCoord.w;
	//outColor.xyz = vec3(rz);
	//return;

	//if(gl_FragCoord.x > 1280/2){
	//	outColor.xyz = vec3(inVert);
	//}else{
	//	outColor.xyz = vec3(indirectVert);
	//}
	
	//if(gl_FragCoord.x > 1280/2){
	//	outColor.xyz = vec3(inVert * gl_FragCoord.w);
	//}else{
	//	outColor.xyz = vec3(indirectVert * gl_FragCoord.w);
	//}

	//if(gl_FragCoord.x > 1280/2){
	//	outColor.xyz = vec3(inCameraView * vec4(inVert.xyz, 0) * gl_FragCoord.w) * 0.5 + 0.5;
	//}else{
	//	outColor.xyz = vec3(inCameraView * vec4(indirectVert.xyz, 0) * gl_FragCoord.w) * 0.5 + 0.5;
	//}

	/*if(gl_FragCoord.x > 1280/2){
		outColor.xyz = vec3(inCameraView * vec4(inVert.xyz, 0) * gl_FragCoord.w) * 0.5 + 0.5;

		indirectVert = texture(GBuffer_Image[DEF_GBuffer_Vert], outColor.xy);
		outColor.xyz = indirectVert.xyz;

	}else{
		outColor.xyz = vec3(indirectVert);
	}*/


	if(gl_FragCoord.x > 1280/2){
		outColor.xyz = vec3((inCameraView * vec4(inVert.xyz, 0)).z * gl_FragCoord.w) * 0.5 + 0.5;
	}else{
		outColor.xyz = vec3((inCameraView * vec4(inVert.xyz, 0)).z);
	}
	return;

	/*
	if(gl_FragCoord.x > 1280/2){
		outColor.xyz = vec3(inVert);
	}else{
		outColor.xyz = vec3(N);
	}
	*/


	vec4 Vert0 = texture(GBuffer_Image[DEF_GBuffer_Vert], gl_FragCoord.xy * invResolution);
	outColor.xyz = vec3(Vert0);
	return;

	//vec4 Vert0 = texture(GBuffer_Image[DEF_GBuffer_Vert], (inCameraView * vec4(inVert.xyz, 0)).xy * gl_FragCoord.w * 0.5 + 0.5);
	//outColor.xyz = vec3(Vert0);

	//vec4 Vert0 = vec4((inCameraView * vec4(inVert.xyz, 1)).xy * gl_FragCoord.w * 0.5 + 0.5, 0, 1);
	//outColor.xyz = vec3(Vert0);

	//outColor.xyz = vec3((inCameraView * (vec4(indirectVert.xyz, 0) * gl_FragCoord.w) * 0.5 + 0.5).z);
	//outColor.xyz = vec3(texture(compositing_shadow[0], screenCoords).xyz);
	//outColor.xyz *= vec3(min(contactShadow + 0.5, 1));
	
	//outColor.xyz = irradiance + lightColor;
	//outColor.xyz = lightColor * vec3(s);
	//outColor.xyz += irradiance;
	//outColor.xyz = lightColor * s;

	//vec3 AO_Point = inVert.xyz;
	//vec4 ndc = inCameraView * vec4(AO_Point, 0);
	//vec2 sv1 = (ndc.xy * gl_FragCoord.w * 0.5 + 0.5);
	//vec4 vert = texture(GBuffer_Image[DEF_GBuffer_Vert], sv1);
	//if(gl_FragCoord.x < 1280/2){
	//	//vert = inCameraView * vec4(vert.xyz, 0);
	//	outColor.xyz = vec3(vert.xyz * gl_FragCoord.w * 0.5 + 0.5);
	//	//outColor.xyz = vec3(vert);
	//	//outColor.xyz = vec3(sv1, 0);
	//}
	//else{
	//	//outColor.xyz = vec3( gl_FragCoord.z * gl_FragCoord.w);
	//	//outColor.xyz = vec3(gl_FragCoord.xyz * gl_FragCoord.w);
	//	//vert = inCameraView * vec4(inVert.xyz, 0);
	//	//outColor.xyz = vec3(gl_FragCoord.z / gl_FragCoord.w);
	//	vert = texture(GBuffer_Image[DEF_GBuffer_Vert], screenCoords);
	//	outColor.xyz = vec3(vert);
	//	//outColor.xyz = vec3(screenCoords, 0);
	//}
	//
	//vec3 SampleVec = f_SphereSampling(vec2(0, 1.57));
	//SampleVec = f_SphereSampling_toWorld(SampleVec, N);
	//outColor.xyz = vec3(SampleVec);
	//float depth = vert.w;


	//vec3 worldFragPos = vec3(screenCoords, depth);
	//vec3 fragNormal = normalize(texture(GBuffer_Image[DEF_GBuffer_Normal], screenCoords).rgb * 2.0 - 1.0); mat3(m_ProjMatrix) * 
	//vec3 fragNormal = texture(GBuffer_Image[DEF_GBuffer_Normal], screenCoords).rgb;
	

	//float ao = CalculateAO(screenCoords, worldFragPos, fragNormal);

	//outColor.xyz = vec3(ao);
	//outColor.xyz += normalize(inNormal);
	//outColor.xyz += normalize(rv) * 0.3;
	//outColor += texture(texEnv[0], vec3(1,0,0));
}




	/*for(int x = 0; x < m_constPush.m_AO_X_Sample; ++x){
		//break;
		for(int y = 0; y < m_constPush.m_AO_Y_Sample; ++y){
			
			float c = 0;
			
			//AO algorithm 1
			vec3 SampleVec = f_SphereSampling( (spaceSampleOffset * vec2(x, y) + vec2(1,-0.5)) );

			SampleVec *= w;
			//vec3 tangent = normalize(SampleVec - inNormal * dot(SampleVec, inNormal));
			//vec3 bitangent = cross(inNormal, tangent);
			//mat3 TBN = mat3(tangent, bitangent, inNormal);
			//SampleVec *= TBN;
			//SampleVec *= vec3(1.0f + float(y) * 0.1);

			//SampleVec.xz = random(spaceSampleOffset * vec2(x, y));
			//vec3 SampleVec = (vec3(spaceSampleOffset * x, 0, spaceSampleOffset * y) - 0.5);
			vec2 sv1 = SampleVec.xz * m_constPush.m_radiusAO * invResolution;
			vec4 indirectVert = texture(GBuffer_Image[DEF_GBuffer_Vert], screenCoords + sv1);
			vec3 indirectNormal = texture(GBuffer_Image[DEF_GBuffer_Normal], screenCoords + sv1).xyz;
			float IndirectPointZ = (inCameraView * vec4(indirectVert.xyz, 0)).z;
			

			c = courrentPointZ - IndirectPointZ;
			
			if ((c) < m_constPush.m_thicknessAO && c > 0) {
				vec3 zVec = inVert.xyz - indirectVert.xyz;
				float z = length(zVec);
				
				c = z / m_constPush.m_thicknessAO;
				c = 0;
				//c = c / m_constPush.m_thicknessAO;
				float d = 1 - abs(min(0, dot(indirectNormal, inNormal)));
				//d = 1 - clamp(0, 1, d);
				AO += 1 - smoothstep(0, 1, (min(1, dot(SampleVec.xz, SampleVec.xz))) * (1-c) * d);
				//AO += (1 - max(dot(SampleVec, SampleVec),1));// * (1 - abs(dot(indirectNormal, inNormal)));
				//AO += 1;
			}
			continue;

			//AO algorithm 2
			//SampleVec = normalize(f_SphereSampling_toWorld_noise(SampleVec, N));
			//vec3 SampleVec = f_ComputeUniformHemisphereSample(vec2(spaceSampleOffset.x * x, spaceSampleOffset.y * y));
			vec3 SampleVec = f_SphereSampling( (spaceSampleOffset * vec2(x, y) + vec2(0,-0.5)) ) * (1.0f + float(y) * 0.1);
			SampleVec = f_SphereSampling_toWorld3(SampleVec, N, -inEyeDir * 0.0005);
			
			const float stepOffset = m_constPush.m_radiusAO / 8;
			//const float l = stepOffset * ao_itNum;
			for(uint i=0; i <= 8; ++i){
				//march the sample ray to its next position
				vec3 AO_Point = inVert.xyz + (SampleVec * ((1.0+i) * stepOffset));
				//vec4 ndc = inCameraView * vec4(AO_Point, 0) * gl_FragCoord.w * 0.5 + 0.5;
				vec4 rPoint = inCameraView * vec4(AO_Point, 0);
				vec2 sv1 = rPoint.xy * gl_FragCoord.w * 0.5 + 0.5;// + sampleOffset;
				
				//project the marched point to screen coordinates
				vec4 indirectVert = texture(GBuffer_Image[DEF_GBuffer_Vert], sv1);
				//vec3 indirectNormal = texture(GBuffer_Image[DEF_GBuffer_Normal], sv1).xyz;

				//float sP = gl_FragCoord.z * gl_FragCoord.w;
				//vec4 IndirectPoint = inCameraView * vec4(indirectVert.xyz, 0) * gl_FragCoord.w * 0.5 + 0.5;
				float IndirectPointZ = (inCameraView * vec4(indirectVert.xyz, 0)).z - 0.0001;
				c = rPoint.z - IndirectPointZ;
				//c = clamp(c, 0, m_constPush.m_thicknessAO);

				//c = IndirectPoint.z - (OriPoint.z * gl_FragCoord.w * 0.5 + 0.5);
				if ((c) < m_constPush.m_thicknessAO && c >= 0) {
					//c = 1 - step(c, m_constPush.m_thicknessAO);

					vec3 zVec = inVert.xyz - indirectVert.xyz;
					float z = length(zVec);
					z = clamp(0, z, m_constPush.m_radiusAO);
					//if(z >= m_constPush.m_thicknessAO) break;

					//c = 1 - abs(dot(inNormal, indirectNormal)) * cos(theta) * sin(theta);
					//AO += (1 - (z / (m_constPush.m_thicknessAO * 2))) * (1-abs(dot(indirectNormal, N)));
					//AO += (1 - (z / (m_constPush.m_radiusAO))) * (1 - min(dot(indirectNormal, inNormal), 0));
					AO += 1 - smoothstep(0, 1, z / (m_constPush.m_radiusAO));
					//AO += 0.5;
					//break;
				}
			}
		}
	}*/

	//irradiance = M_PI * irradiance * (1.0 / float(nrSamples));
	

	//AO = M_PI * AO * (1.0 / float(nrSamples));
	//AO /= float(sampleCount * sampleCount * ao_itNum);
	//AO algorithm 1
	//{
		//AO /= float(m_constPush.m_AO_X_Sample * m_constPush.m_AO_Y_Sample);
		//AO *= m_constPush.m_intensityAO;
		//AO -= 0.5;
		//AO *= 2;
		//AO = 1-clamp(AO, 0, 1);
		//AO = pow(AO, 1);
	//}
	//outColor.xyz = vec3(AO);
	//return;

	//AO algorithm 2
	//{
		//AO /= float(sampleCount * sampleCount);
		//AO *= m_constPush.m_intensityAO;
		//AO = 1-clamp(AO, 0, 1);
	//}
	
	//AO -= 0.5;
	//AO = 1 - min(AO, 1);
	//irradiance = irradiance;
	//f_SphereSampling_toWorld(TangentSample, rv);
	//irradiance += texture(texEnv[0], N).rgb;
	
	
	
	//if (gl_SampleID == 0) {
	//}
	//barriers();
	//outVert = vec4(vec3(1), gl_FragDepth);
	//outNormal = vec4(vec3(1), s);
	//outColor.xyz = vec3(s);
	//s = texture(compositing_shadow[0], screenCoords).b;

	//s = gaussianBlurB(compositing_shadow[0], screenCoords, invResolution, 2);
	//outColor.xyz = vec3(ao_X_ItNum);
	//return;

	/*outColor.xyz = vec3(s);
	return;
	*/
	