// ---------------------------------------------------------------------------
// Global state for the progressive path tracing renderer.
// ---------------------------------------------------------------------------
let SCREEN_WIDTH,SCREEN_HEIGHT;
//let scene, camera, renderer, controls, light, model,hdrLoader;
let canvas, context;
let container, stats;
let controls;
// Three separate scenes: path trace -> screen copy (feedback) -> final output.
let pathTracingScene, screenCopyScene, screenOutputScene;
let pathTracingUniforms, screenCopyUniforms, screenOutputUniforms;
let pathTracingDefines;
let pathTracingVertexShader, pathTracingFragmentShader;
let screenCopyVertexShader, screenCopyFragmentShader;
let screenOutputVertexShader, screenOutputFragmentShader;
let pathTracingGeometry, pathTracingMaterial, pathTracingMesh;
let screenCopyGeometry, screenCopyMaterial, screenCopyMesh;
let screenOutputGeometry, screenOutputMaterial, screenOutputMesh;
// Float render targets used to accumulate samples across frames (ping-pong).
let pathTracingRenderTarget, screenCopyRenderTarget;
let quadCamera, worldCamera;
let renderer, clock;
let frameTime, elapsedTime;
let fovScale;
// Camera-lens state; flags are set by input handlers and consumed per-frame.
let increaseFOV = false;
let decreaseFOV = false;
let apertureSize = 0.0;
let increaseAperture = false;
let decreaseAperture = false;
let lock =  false;
let focusDistance = 132.0;
let increaseFocusDist = false;
let decreaseFocusDist = false;
let pixelRatio = 1;
let windowIsBeingResized = false;
let TWO_PI = Math.PI * 2;
// Progressive-rendering counters: sampleCounter drives the frame blend weight,
// frameCounter presumably seeds per-frame randomness in the shader — TODO confirm.
let sampleCounter = 1.0;
let frameCounter = 1.0;
let keyboard = new THREEx.KeyboardState();
let cameraIsMoving = false;
let cameraRecentlyMoving = false;
let isPaused = true;
let oldYawRotation, oldPitchRotation;
// Mobile / touch input state.
let mobileJoystickControls = null;
let oldDeltaX = 0,
    oldDeltaY = 0;
let newDeltaX = 0,
    newDeltaY = 0;
let pinchDeltaX = 0;
let fontAspect;
let useGenericInput = true;
let sunAngularDiameterCos;
let EPS_intersect;
let blueNoiseTexture;
// the following variables will be used to calculate rotations and directions from the camera
let cameraDirectionVector = new THREE.Vector3(); //for moving where the camera is looking
let cameraRightVector = new THREE.Vector3(); //for strafing the camera right and left
let cameraUpVector = new THREE.Vector3(); //for moving camera up and down
let cameraWorldQuaternion = new THREE.Quaternion(); //for rotating scene objects to match camera's current rotation
let cameraControlsObject; //for positioning and moving the camera itself
let cameraControlsYawObject; //allows access to control camera's left/right movements through mobile input
let cameraControlsPitchObject; //allows access to control camera's up/down movements through mobile input

let PI_2 = Math.PI / 2; //used by controls below

// Make the on-screen info element non-interactive so it doesn't steal input.
let infoElement = document.getElementById('info');
infoElement.style.cursor = "default";
infoElement.style.userSelect = "none";
infoElement.style.MozUserSelect = "none";


// UI-tweakable render parameters.
let mouseControl = true;
let fileLoader = new THREE.FileLoader();
let HDRI_ExposureValue = 1;
let MaterialType =1;
let SphereSize = 1;
let renderControl = 0;

// Scene / model configuration.
var sceneIsDynamic = false;
var camFlightSpeed = 60;
var ModelPath = [];
var hdrPath = [];
var gltfLoader = new THREE.OBJLoader(); // NOTE(review): despite the name, this is an OBJLoader
var hdrTexture, hdrLoader;
var modelMesh;
var modelScale = 1.0;
var modelPositionOffset = new THREE.Vector3();
var albedoTexture;
// Triangle / BVH staging data, filled in by the model loaders and initTHREEjs().
var total_number_of_triangles = 0;
var triangle_array;
var triangleMaterialMarkers = [];
var pathTracingMaterialList = [];
var uniqueMaterialTextures = [];
var meshList = [];
var geoList = [];
var triangleDataTexture;
var aabb_array;
var aabbDataTexture;
var totalWork;
// Scratch vectors reused per triangle: positions (vp), normals (vn), UVs (vt).
var vp0 = new THREE.Vector3();
var vp1 = new THREE.Vector3();
var vp2 = new THREE.Vector3();
var vn0 = new THREE.Vector3();
var vn1 = new THREE.Vector3();
var vn2 = new THREE.Vector3();
var vt0 = new THREE.Vector2();
var vt1 = new THREE.Vector2();
var vt2 = new THREE.Vector2();

var changeMaterialColor = false;
var changeMaterialRoughness = false;

// Mouse-wheel handler: translates wheel direction into FOV-change requests,
// which are consumed (and cleared) by the per-frame update loop.
function onMouseWheel(event) {

        // Keep the event from bubbling up to ancestor elements.
        //event.preventDefault();
        event.stopPropagation();

        // Scrolling down (positive deltaY) widens the field of view (zoom out);
        // scrolling up (negative deltaY) narrows it (zoom in).
        const scrolledDown = event.deltaY > 0;
        const scrolledUp = event.deltaY < 0;

        if (scrolledDown) {
                increaseFOV = true;
        }
        else if (scrolledUp) {
                decreaseFOV = true;
        }

}
// Resize handler: resizes the renderer and both accumulation render targets,
// updates the camera aspect ratio, and refreshes the view-plane uniforms.
// Also flags the resize so the sample accumulation restarts in animate().
function onWindowResize(event) {

        windowIsBeingResized = true;
        // -65 / -10 presumably leave room for surrounding page UI — TODO confirm
        SCREEN_WIDTH = document.body.clientWidth -65 ;
        SCREEN_HEIGHT = document.body.clientHeight-10;
        renderer.setPixelRatio(pixelRatio);
        renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);

        // Keep the shader resolution and both accumulation targets in sync with
        // the actual WebGL drawing-buffer size (which accounts for pixelRatio).
        pathTracingUniforms.uResolution.value.x = context.drawingBufferWidth;
        pathTracingUniforms.uResolution.value.y = context.drawingBufferHeight;

        pathTracingRenderTarget.setSize(context.drawingBufferWidth, context.drawingBufferHeight);
        screenCopyRenderTarget.setSize(context.drawingBufferWidth, context.drawingBufferHeight);

        worldCamera.aspect = SCREEN_WIDTH / SCREEN_HEIGHT;
        worldCamera.updateProjectionMatrix();

        // the following scales all scene objects by the worldCamera's field of view,
        // taking into account the screen aspect ratio and multiplying the uniform uULen,
        // the x-coordinate, by this ratio
        fovScale = worldCamera.fov * 0.5 * (Math.PI / 180.0);
        pathTracingUniforms.uVLen.value = Math.tan(fovScale);
        pathTracingUniforms.uULen.value = pathTracingUniforms.uVLen.value * worldCamera.aspect;

} // end function onWindowResize( event )
// One-time setup: creates the WebGL2 renderer, the three scenes and cameras,
// the float accumulation render targets, packs the loaded model's triangles
// into 2048x2048 float DataTextures, builds the BVH, wires up the three
// shader passes (path trace -> copy -> output), and starts the render loop.
// Precondition: modelMesh has been populated by one of the model loaders.
function initTHREEjs() {
        canvas = document.createElement('canvas');
        renderer = new THREE.WebGLRenderer({ canvas: canvas, context: canvas.getContext('webgl2') });
        renderer.debug.checkShaderErrors = true;
        renderer.autoClear = false;
        renderer.toneMapping = THREE.ReinhardToneMapping;
        //renderer.toneMapping = THREE.ACESFilmicToneMapping;
        context = renderer.getContext();
        // Needed to render into FloatType render targets on WebGL2.
        context.getExtension('EXT_color_buffer_float');
        container = document.getElementById('main-wrapper');
        container.appendChild(renderer.domElement);

        clock = new THREE.Clock();

        pathTracingScene = new THREE.Scene();
        screenCopyScene = new THREE.Scene();
        screenOutputScene = new THREE.Scene();

        // quadCamera is simply the camera to help render the full screen quad (2 triangles),
        // hence the name.  It is an Orthographic camera that sits facing the view plane, which serves as
        // the window into our 3d world. This camera will not move or rotate for the duration of the app.
        quadCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1);
        screenCopyScene.add(quadCamera);
        screenOutputScene.add(quadCamera);

        // worldCamera is the dynamic camera 3d object that will be positioned, oriented and
        // constantly updated inside the 3d scene.  Its view will ultimately get passed back to the
        // stationary quadCamera, which renders the scene to a fullscreen quad (made up of 2 large triangles).
        worldCamera = new THREE.PerspectiveCamera(60, document.body.clientWidth / document.body.clientHeight, 1, 1000);
        pathTracingScene.add(worldCamera);

        // First-person style controls: yaw object contains pitch object contains camera.
        controls = new CameraControls(worldCamera);
        cameraControlsObject = controls.getObject();
        cameraControlsYawObject = controls.getYawObject();
        cameraControlsPitchObject = controls.getPitchObject();
        pathTracingScene.add(cameraControlsObject);

        // setup render targets...
        // Nearest-filtered, mipmap-free RGBA float targets: exact texel fetches are
        // required because these hold accumulated radiance, not displayable images.
        pathTracingRenderTarget = new THREE.WebGLRenderTarget(context.drawingBufferWidth, context.drawingBufferHeight, {
                minFilter: THREE.NearestFilter,
                magFilter: THREE.NearestFilter,
                format: THREE.RGBAFormat,
                type: THREE.FloatType,
                depthBuffer: false,
                stencilBuffer: false
        });
        pathTracingRenderTarget.texture.generateMipmaps = false;

        screenCopyRenderTarget = new THREE.WebGLRenderTarget(context.drawingBufferWidth, context.drawingBufferHeight, {
                minFilter: THREE.NearestFilter,
                magFilter: THREE.NearestFilter,
                format: THREE.RGBAFormat,
                type: THREE.FloatType,
                depthBuffer: false,
                stencilBuffer: false
        });
        screenCopyRenderTarget.texture.generateMipmaps = false;

        // blueNoise texture used in all demos
        blueNoiseTexture = new THREE.TextureLoader().load('textures/BlueNoise_RGBA256.png');
        blueNoiseTexture.wrapS = THREE.RepeatWrapping;
        blueNoiseTexture.wrapT = THREE.RepeatWrapping;
        blueNoiseTexture.flipY = false;
        blueNoiseTexture.minFilter = THREE.NearestFilter;
        blueNoiseTexture.magFilter = THREE.NearestFilter;
        blueNoiseTexture.generateMipmaps = false;


        EPS_intersect = mouseControl ? 0.01 : 1.0; // less precision on mobile

        // set camera's field of view
        worldCamera.fov = 60;
        focusDistance = 80.0;

        // position and orient camera
        cameraControlsObject.position.set(20, 30, 30);
        // look slightly downward
        //cameraControlsPitchObject.rotation.x = -0.2;

        // Non-indexed geometry: 9 floats (3 vertices x xyz) per triangle.
        total_number_of_triangles = modelMesh.geometry.attributes.position.array.length / 9;
        console.log("Triangle count:" + total_number_of_triangles);

        // Work list of triangle indices handed to the BVH builder.
        totalWork = new Uint32Array(total_number_of_triangles);

        triangle_array = new Float32Array(2048 * 2048 * 4);
        // 2048 = width of texture, 2048 = height of texture, 4 = r,g,b, and a components

        aabb_array = new Float32Array(2048 * 2048 * 4);
        // 2048 = width of texture, 2048 = height of texture, 4 = r,g,b, and a components


        var triangle_b_box_min = new THREE.Vector3();
        var triangle_b_box_max = new THREE.Vector3();
        var triangle_b_box_centroid = new THREE.Vector3();


        var vpa = new Float32Array(modelMesh.geometry.attributes.position.array);
        var vna = new Float32Array(modelMesh.geometry.attributes.normal.array);
        var vta = null;
        var modelHasUVs = false;
        if (modelMesh.geometry.attributes.uv !== undefined) {
                vta = new Float32Array(modelMesh.geometry.attributes.uv.array);
                modelHasUVs = true;
        }

        var materialNumber = 0;

        // Pack each triangle into 32 floats (8 RGBA texels) of triangle_array and
        // compute its AABB min/max/centroid for the BVH builder.
        for (let i = 0; i < total_number_of_triangles; i++) {

                triangle_b_box_min.set(Infinity, Infinity, Infinity);
                triangle_b_box_max.set(-Infinity, -Infinity, -Infinity);

                // triangleMaterialMarkers holds cumulative triangle counts, so triangle i
                // belongs to the first material j with i < triangleMaterialMarkers[j].
                for (let j = 0; j < pathTracingMaterialList.length; j++) {
                        if (i < triangleMaterialMarkers[j]) {
                                materialNumber = j;
                                break;
                        }
                }

                // record vertex texture coordinates (UVs)
                if (modelHasUVs) {
                        vt0.set( vta[6 * i + 0], vta[6 * i + 1] );
                        vt1.set( vta[6 * i + 2], vta[6 * i + 3] );
                        vt2.set( vta[6 * i + 4], vta[6 * i + 5] );
                }
                else {
                        // -1 is a sentinel the shader can use to mean 'no UVs'.
                        vt0.set( -1, -1 );
                        vt1.set( -1, -1 );
                        vt2.set( -1, -1 );
                }

                // record vertex normals
                vn0.set( vna[9 * i + 0], vna[9 * i + 1], vna[9 * i + 2] ).normalize();
                vn1.set( vna[9 * i + 3], vna[9 * i + 4], vna[9 * i + 5] ).normalize();
                vn2.set( vna[9 * i + 6], vna[9 * i + 7], vna[9 * i + 8] ).normalize();

                // record vertex positions
                vp0.set( vpa[9 * i + 0], vpa[9 * i + 1], vpa[9 * i + 2] );
                vp1.set( vpa[9 * i + 3], vpa[9 * i + 4], vpa[9 * i + 5] );
                vp2.set( vpa[9 * i + 6], vpa[9 * i + 7], vpa[9 * i + 8] );

                // Bake the model's world transform (uniform scale + translation) into
                // the vertex data so the shader needs no per-model matrix.
                vp0.multiplyScalar(modelScale);
                vp1.multiplyScalar(modelScale);
                vp2.multiplyScalar(modelScale);

                vp0.add(modelPositionOffset);
                vp1.add(modelPositionOffset);
                vp2.add(modelPositionOffset);

                //slot 0
                triangle_array[32 * i +  0] = vp0.x; // r or x
                triangle_array[32 * i +  1] = vp0.y; // g or y
                triangle_array[32 * i +  2] = vp0.z; // b or z
                triangle_array[32 * i +  3] = vp1.x; // a or w

                //slot 1
                triangle_array[32 * i +  4] = vp1.y; // r or x
                triangle_array[32 * i +  5] = vp1.z; // g or y
                triangle_array[32 * i +  6] = vp2.x; // b or z
                triangle_array[32 * i +  7] = vp2.y; // a or w

                //slot 2
                triangle_array[32 * i +  8] = vp2.z; // r or x
                triangle_array[32 * i +  9] = vn0.x; // g or y
                triangle_array[32 * i + 10] = vn0.y; // b or z
                triangle_array[32 * i + 11] = vn0.z; // a or w

                //slot 3
                triangle_array[32 * i + 12] = vn1.x; // r or x
                triangle_array[32 * i + 13] = vn1.y; // g or y
                triangle_array[32 * i + 14] = vn1.z; // b or z
                triangle_array[32 * i + 15] = vn2.x; // a or w

                //slot 4
                triangle_array[32 * i + 16] = vn2.y; // r or x
                triangle_array[32 * i + 17] = vn2.z; // g or y
                triangle_array[32 * i + 18] = vt0.x; // b or z
                triangle_array[32 * i + 19] = vt0.y; // a or w

                //slot 5
                triangle_array[32 * i + 20] = vt1.x; // r or x
                triangle_array[32 * i + 21] = vt1.y; // g or y
                triangle_array[32 * i + 22] = vt2.x; // b or z
                triangle_array[32 * i + 23] = vt2.y; // a or w

                // the remaining slots are used for PBR material properties

                //slot 6
                triangle_array[32 * i + 24] = pathTracingMaterialList[materialNumber].type; // r or x
                triangle_array[32 * i + 25] = pathTracingMaterialList[materialNumber].color.r; // g or y
                triangle_array[32 * i + 26] = pathTracingMaterialList[materialNumber].color.g; // b or z
                triangle_array[32 * i + 27] = pathTracingMaterialList[materialNumber].color.b; // a or w

                //slot 7
                triangle_array[32 * i + 28] = pathTracingMaterialList[materialNumber].albedoTextureID; // r or x
                triangle_array[32 * i + 29] = 0; // g or y
                triangle_array[32 * i + 30] = 0; // b or z
                triangle_array[32 * i + 31] = 0; // a or w

                // Expand this triangle's bounding box by each of its three vertices.
                triangle_b_box_min.copy(triangle_b_box_min.min(vp0));
                triangle_b_box_max.copy(triangle_b_box_max.max(vp0));
                triangle_b_box_min.copy(triangle_b_box_min.min(vp1));
                triangle_b_box_max.copy(triangle_b_box_max.max(vp1));
                triangle_b_box_min.copy(triangle_b_box_min.min(vp2));
                triangle_b_box_max.copy(triangle_b_box_max.max(vp2));

                triangle_b_box_centroid.set((triangle_b_box_min.x + triangle_b_box_max.x) * 0.5,
                    (triangle_b_box_min.y + triangle_b_box_max.y) * 0.5,
                    (triangle_b_box_min.z + triangle_b_box_max.z) * 0.5);

                // 9 floats per triangle: AABB min, AABB max, centroid.
                aabb_array[9 * i + 0] = triangle_b_box_min.x;
                aabb_array[9 * i + 1] = triangle_b_box_min.y;
                aabb_array[9 * i + 2] = triangle_b_box_min.z;
                aabb_array[9 * i + 3] = triangle_b_box_max.x;
                aabb_array[9 * i + 4] = triangle_b_box_max.y;
                aabb_array[9 * i + 5] = triangle_b_box_max.z;
                aabb_array[9 * i + 6] = triangle_b_box_centroid.x;
                aabb_array[9 * i + 7] = triangle_b_box_centroid.y;
                aabb_array[9 * i + 8] = triangle_b_box_centroid.z;

                totalWork[i] = i;
        }


        // Build the BVH acceleration structure, which places a bounding box ('root' of the tree) around all of the
        // triangles of the entire mesh, then subdivides each box into 2 smaller boxes.  It continues until it reaches 1 triangle,
        // which it then designates as a 'leaf'
        // NOTE(review): BVH_Build_Iterative is defined elsewhere; it presumably
        // rewrites aabb_array in place into the packed BVH node layout — TODO confirm.
        BVH_Build_Iterative(totalWork, aabb_array);


        triangleDataTexture = new THREE.DataTexture(triangle_array,
            2048,
            2048,
            THREE.RGBAFormat,
            THREE.FloatType,
            THREE.Texture.DEFAULT_MAPPING,
            THREE.ClampToEdgeWrapping,
            THREE.ClampToEdgeWrapping,
            THREE.NearestFilter,
            THREE.NearestFilter,
            1,
            THREE.LinearEncoding);

        triangleDataTexture.flipY = false;
        triangleDataTexture.generateMipmaps = false;
        triangleDataTexture.needsUpdate = true;

        aabbDataTexture = new THREE.DataTexture(aabb_array,
            2048,
            2048,
            THREE.RGBAFormat,
            THREE.FloatType,
            THREE.Texture.DEFAULT_MAPPING,
            THREE.ClampToEdgeWrapping,
            THREE.ClampToEdgeWrapping,
            THREE.NearestFilter,
            THREE.NearestFilter,
            1,
            THREE.LinearEncoding);

        aabbDataTexture.flipY = false;
        aabbDataTexture.generateMipmaps = false;
        aabbDataTexture.needsUpdate = true;
        // hdrChange is defined elsewhere; presumably loads/applies HDR environment
        // number 0 and sets hdrTexture — TODO confirm against its definition.
        hdrChange(0);

        // setup screen-size quad geometry and shaders....

        // this full-screen quad mesh performs the path tracing operations and produces a screen-sized image
        pathTracingGeometry = new THREE.PlaneBufferGeometry(2, 2);
        pathTracingUniforms = {
                tPreviousTexture: { type: "t", value: screenCopyRenderTarget.texture },
                tBlueNoiseTexture: { type: "t", value: blueNoiseTexture },
                uCameraIsMoving: { type: "b1", value: false },
                uSceneIsDynamic: { type: "b1", value: sceneIsDynamic },

                uEPS_intersect: { type: "f", value: EPS_intersect },
                uTime: { type: "f", value: 0.0 },
                uSampleCounter: { type: "f", value: 0.0 },
                uFrameCounter: { type: "f", value: 1.0 },
                uULen: { type: "f", value: 1.0 },
                uVLen: { type: "f", value: 1.0 },
                uApertureSize: { type: "f", value: 0.0 },
                uFocusDistance: { type: "f", value: focusDistance },
                uSunAngularDiameterCos: { type: "f", value: sunAngularDiameterCos },

                uResolution: { type: "v2", value: new THREE.Vector2() },
                uRandomVec2: { type: "v2", value: new THREE.Vector2() },

                uCameraMatrix: { type: "m4", value: new THREE.Matrix4() },
                tTriangleTexture:{ type: "t", value: triangleDataTexture },
                tAABBTexture:{ type: "t", value: aabbDataTexture },
                tHDRTexture:{ type: "t", value: hdrTexture },
                uMaterialType:{ type: "i", value: MaterialType},
                uHDRI_Exposure:{ type: "f", value:HDRI_ExposureValue},
                uRoughness:{ type: "f", value: 0.0 },
                uSphereSize:{ type: "f", value: SphereSize },
                uMaterialColor:{ type: "v3", value: new THREE.Color() }
        };

        pathTracingDefines = {
                //NUMBER_OF_TRIANGLES: total_number_of_triangles
        };

        // load vertex and fragment shader files that are used in the pathTracing material, mesh and scene
        fileLoader.load('shaders/common_PathTracing_Vertex.glsl', function (shaderText) {
                pathTracingVertexShader = shaderText;
                fileLoader.load('shaders/HDRI_Environment_Fragment.glsl', function (shaderText) {

                        pathTracingFragmentShader = shaderText;

                        pathTracingMaterial = new THREE.ShaderMaterial({
                                uniforms: pathTracingUniforms,
                                defines: pathTracingDefines,
                                vertexShader: pathTracingVertexShader,
                                fragmentShader: pathTracingFragmentShader,
                                depthTest: false,
                                depthWrite: false
                        });

                        pathTracingMesh = new THREE.Mesh(pathTracingGeometry, pathTracingMaterial);
                        pathTracingScene.add(pathTracingMesh);

                        // the following keeps the large scene ShaderMaterial quad right in front
                        //   of the camera at all times. This is necessary because without it, the scene
                        //   quad will fall out of view and get clipped when the camera rotates past 180 degrees.
                        worldCamera.add(pathTracingMesh);

                });
        });
        // this full-screen quad mesh copies the image output of the pathtracing shader and feeds it back in to that shader as a 'previousTexture'
        screenCopyGeometry = new THREE.PlaneBufferGeometry(2, 2);

        screenCopyUniforms = {
                tPathTracedImageTexture: { type: "t", value: null }
        };

        // NOTE(review): this callback reads pathTracingVertexShader, which is assigned
        // in the separate async load above — if this fragment shader finishes loading
        // first, vertexShader would be undefined. TODO confirm the intended ordering.
        fileLoader.load('shaders/ScreenCopy_Fragment.glsl', function (shaderText)
        {
                screenCopyFragmentShader = shaderText;

                screenCopyMaterial = new THREE.ShaderMaterial({
                        uniforms: screenCopyUniforms,
                        vertexShader: pathTracingVertexShader,
                        fragmentShader: screenCopyFragmentShader,
                        depthWrite: false,
                        depthTest: false
                });

                screenCopyMaterial.uniforms.tPathTracedImageTexture.value = pathTracingRenderTarget.texture;

                screenCopyMesh = new THREE.Mesh(screenCopyGeometry, screenCopyMaterial);
                screenCopyScene.add(screenCopyMesh);
        });


        // this full-screen quad mesh takes the image output of the path tracing shader (which is a continuous blend of the previous frame and current frame),
        // and applies gamma correction (which brightens the entire image), and then displays the final accumulated rendering to the screen
        screenOutputGeometry = new THREE.PlaneBufferGeometry(2, 2);

        screenOutputUniforms = {
                uOneOverSampleCounter: { type: "f", value: 0.0 },
                tPathTracedImageTexture: { type: "t", value: null }
        };

        // NOTE(review): same pathTracingVertexShader load-ordering caveat as above.
        fileLoader.load('shaders/ScreenOutput_Fragment.glsl', function (shaderText)
        {
                screenOutputFragmentShader = shaderText;

                screenOutputMaterial = new THREE.ShaderMaterial({
                        uniforms: screenOutputUniforms,
                        vertexShader: pathTracingVertexShader,
                        fragmentShader: screenOutputFragmentShader,
                        depthWrite: false,
                        depthTest: false
                });

                screenOutputMaterial.uniforms.tPathTracedImageTexture.value = pathTracingRenderTarget.texture;
                screenOutputMesh = new THREE.Mesh(screenOutputGeometry, screenOutputMaterial);
                screenOutputScene.add(screenOutputMesh);
        });


        // this 'jumpstarts' the initial dimensions and parameters for the window and renderer
        onWindowResize();

        // everything is set up, now we can start animating
        animate();

} // end function initTHREEjs()
// Plain data holder describing one path-traced surface material.
// a list of material types and their corresponding numbers are found in the 'pathTracingCommon.js' file
function MaterialObject() {
        // Default: white diffuse surface with no texture map.
        Object.assign(this, {
                type: 1,                                     // '1' = diffuse surface type
                albedoTextureID: -1,                         // diffuse-map index; -1 = untextured
                color: new THREE.Color(1.0, 1.0, 1.0),       // meaning depends on 'type' above
                roughness: 0.0,                              // 0.0 (perfectly smooth) .. 1.0 (extremely rough)
                metalness: 0.0,                              // usually 0 (non-metal) or 1 (metal)
                opacity: 1.0,                                // 0.0 (fully transparent) .. 1.0 (fully opaque)
                refractiveIndex: 1.0                         // 1.0=air, 1.33=water, 1.4=clearCoat, 1.5=glass, etc.
        });
}
// Loads the default demo model, builds the per-mesh material list and the
// cumulative triangle markers, merges all sub-meshes into one non-indexed
// geometry (modelMesh), then hands off to initTHREEjs().
// NOTE(review): 'gltfLoader' is actually a THREE.OBJLoader (see its declaration);
// the meshGroup.scene check only matters if it is swapped for a real GLTFLoader.
function load_GLTF_Model() {

        gltfLoader.load("models/dog/dog.obj", function( meshGroup ) { // Triangles: 100,000
                if (meshGroup.scene)
                        meshGroup = meshGroup.scene;

                // Collect every mesh child: record a path tracing material for it and
                // its triangle count (non-indexed position array = 9 floats/triangle).
                meshGroup.traverse( function ( child ) {

                        if ( child.isMesh ) {

                                let mat = new MaterialObject();
                                mat.type = 1;
                                mat.albedoTextureID = -1;
                                //mat.color = child.material.color;
                                mat.roughness = child.material.roughness || 0.0;
                                mat.metalness = child.material.metalness || 0.0;
                                mat.opacity = child.material.opacity || 1.0;
                                mat.refractiveIndex = 1.0;
                                pathTracingMaterialList.push(mat);
                                triangleMaterialMarkers.push(child.geometry.attributes.position.array.length / 9);
                                meshList.push(child);
                        }
                } );

                // Merge all sub-meshes into a single geometry so the triangle packer
                // and BVH builder in initTHREEjs() see one flat triangle list.
                modelMesh = meshList[0].clone();

                for (let i = 0; i < meshList.length; i++) {
                        geoList.push(meshList[i].geometry);
                }

                modelMesh.geometry = THREE.BufferGeometryUtils.mergeBufferGeometries(geoList);

                // The packing loop expects non-indexed (soup) geometry.
                if (modelMesh.geometry.index)
                        modelMesh.geometry = modelMesh.geometry.toNonIndexed();

                modelMesh.geometry.center();

                // Convert per-mesh triangle counts into cumulative markers:
                // triangle i belongs to material j iff i < triangleMaterialMarkers[j].
                for (let i = 1; i < triangleMaterialMarkers.length; i++) {
                        triangleMaterialMarkers[i] += triangleMaterialMarkers[i-1];
                }

                for (let i = 0; i < meshList.length; i++) {
                        if (meshList[i].material.map != undefined)
                                uniqueMaterialTextures.push(meshList[i].material.map);
                }

                // De-duplicate textures that share the same image source URL.
                for (let i = 0; i < uniqueMaterialTextures.length; i++) {
                        for (let j = i + 1; j < uniqueMaterialTextures.length; j++) {
                                if (uniqueMaterialTextures[i].image.src == uniqueMaterialTextures[j].image.src) {
                                        uniqueMaterialTextures.splice(j, 1);
                                        j -= 1;
                                }
                        }
                }

                // Point each material at the index of its (de-duplicated) albedo texture.
                for (let i = 0; i < meshList.length; i++) {
                        if (meshList[i].material.map != undefined) {
                                for (let j = 0; j < uniqueMaterialTextures.length; j++) {
                                        if (meshList[i].material.map.image.src == uniqueMaterialTextures[j].image.src) {
                                                pathTracingMaterialList[i].albedoTextureID = j;
                                        }
                                }
                        }
                }

                // ********* different GLTF Model Settings **********

                // scale and world-position offset applied per-vertex in initTHREEjs()
                modelScale = 0.3;
                modelPositionOffset.set(0, 22, -40);
                initTHREEjs();

        });

} // end function load_GLTF_Model()
// Loads a user-supplied model from ModelPath[0] and rebuilds the scene.
// Mirrors load_GLTF_Model(): same material/marker/merge/texture-dedupe pipeline,
// but removes the old canvas first and uses different scale/offset settings.
// NOTE(review): the shared module arrays (meshList, geoList, etc.) are NOT cleared
// here, so re-uploading appends to previous state — TODO confirm intent.
function UploadModel() {
        gltfLoader.load(ModelPath[0], function( meshGroup ) {
                if (meshGroup.scene)
                        meshGroup = meshGroup.scene;
                // Collect every mesh child: record a path tracing material for it and
                // its triangle count (non-indexed position array = 9 floats/triangle).
                meshGroup.traverse( function ( child ) {

                        if ( child.isMesh ) {

                                let mat = new MaterialObject();
                                mat.type = 1;
                                mat.albedoTextureID = -1;
                                //mat.color = child.material.color;
                                mat.roughness = child.material.roughness || 0.0;
                                mat.metalness = child.material.metalness || 0.0;
                                mat.opacity = child.material.opacity || 1.0;
                                mat.refractiveIndex = 1.0;
                                pathTracingMaterialList.push(mat);
                                triangleMaterialMarkers.push(child.geometry.attributes.position.array.length / 9);
                                meshList.push(child);
                        }
                } );

                // Merge all sub-meshes into a single non-indexed geometry for the BVH builder.
                modelMesh = meshList[0].clone();
                for (let i = 0; i < meshList.length; i++) {
                        geoList.push(meshList[i].geometry);
                }

                modelMesh.geometry = THREE.BufferGeometryUtils.mergeBufferGeometries(geoList);

                if (modelMesh.geometry.index)
                        modelMesh.geometry = modelMesh.geometry.toNonIndexed();

                modelMesh.geometry.center();

                // Convert per-mesh triangle counts into cumulative markers (see initTHREEjs()).
                for (let i = 1; i < triangleMaterialMarkers.length; i++) {
                        triangleMaterialMarkers[i] += triangleMaterialMarkers[i-1];
                }

                for (let i = 0; i < meshList.length; i++) {
                        if (meshList[i].material.map != undefined)
                                uniqueMaterialTextures.push(meshList[i].material.map);
                }

                // De-duplicate textures that share the same image source URL.
                for (let i = 0; i < uniqueMaterialTextures.length; i++) {
                        for (let j = i + 1; j < uniqueMaterialTextures.length; j++) {
                                if (uniqueMaterialTextures[i].image.src == uniqueMaterialTextures[j].image.src) {
                                        uniqueMaterialTextures.splice(j, 1);
                                        j -= 1;
                                }
                        }
                }

                // Point each material at the index of its (de-duplicated) albedo texture.
                for (let i = 0; i < meshList.length; i++) {
                        if (meshList[i].material.map != undefined) {
                                for (let j = 0; j < uniqueMaterialTextures.length; j++) {
                                        if (meshList[i].material.map.image.src == uniqueMaterialTextures[j].image.src) {
                                                pathTracingMaterialList[i].albedoTextureID = j;
                                        }
                                }
                        }
                }
                modelScale = 2.0;
                modelPositionOffset.set(0, 28, -40);

                // Drop the previous canvas (jQuery) before re-initializing the renderer.
                $(canvas).remove();
                initTHREEjs(); // boilerplate: init necessary three.js items and scene/demo-specific objects

        });

} // end function UploadModel()
// Entry point: start loading the default model; initTHREEjs() and the render
// loop are started from its completion callback.
load_GLTF_Model();
// Guard so DOM event handlers are attached exactly once (previously they were
// re-assigned on every animation frame, causing needless per-frame DOM lookups).
let uiHandlersBound = false;

// Recompute the path tracer's virtual view-plane extents from worldCamera.fov.
// Shared by the increase/decrease-FOV branches in animate().
function updateFOVUniforms() {

        fovScale = worldCamera.fov * 0.5 * (Math.PI / 180.0);
        pathTracingUniforms.uVLen.value = Math.tan(fovScale);
        pathTracingUniforms.uULen.value = pathTracingUniforms.uVLen.value * worldCamera.aspect;
}

// Attach all UI control handlers. Setting `lock = true` (or the aperture/focus
// flags) makes the next animate() frame treat the camera as moving, which
// resets progressive-refinement accumulation so the change becomes visible.
function bindUIHandlers() {

        // Render-mode control slider.
        document.getElementById("Render_Control").oninput = function () {
                pathTracingUniforms.uMaterialType.value = Number(this.value);
                MaterialType = this.value;
                lock = true;
        };

        // Material type selector.
        document.getElementById("Mat_TpyeChange").oninput = function () {
                pathTracingUniforms.uMaterialType.value = Number(this.value);
                MaterialType = this.value;
                lock = true;
        };

        // Model file upload: read as data URL, prepend to ModelPath, reload.
        document.getElementById("Mod_Upload").onchange = function () {
                const file = this.files[0];
                if (window.FileReader) {
                        const fr = new FileReader();
                        fr.onloadend = function () {
                                ModelPath.unshift(this.result);
                                UploadModel();
                        };
                        fr.readAsDataURL(file);
                }
        };

        // HDR environment-map upload.
        document.getElementById("HDRI_Upload").onchange = function () {
                document.getElementById("HDRI_Change").max = hdrPath.length;
                const file = this.files[0];
                if (window.FileReader) {
                        const fr = new FileReader();
                        fr.onloadend = function () {
                                hdrPath.unshift(this.result);
                                hdrTexture = hdrLoader.load(hdrPath[0], function (texture) {
                                        texture.encoding = THREE.LinearEncoding;
                                        texture.minFilter = THREE.LinearFilter;
                                        texture.magFilter = THREE.LinearFilter;
                                        texture.generateMipmaps = false;
                                        texture.flipY = true;
                                });
                                pathTracingUniforms.tHDRTexture = { type: "t", value: hdrTexture };
                                decreaseAperture = true; // forces an accumulation reset next frame
                        };
                        fr.readAsDataURL(file);
                }
        };

        // HDRI selection slider: swap the environment texture by index.
        document.getElementById("HDRI_Change").oninput = function () {
                hdrTexture = hdrLoader.load(hdrPath[this.value], function (texture) {
                        texture.encoding = THREE.LinearEncoding;
                        texture.minFilter = THREE.LinearFilter;
                        texture.magFilter = THREE.LinearFilter;
                        texture.generateMipmaps = false;
                        texture.flipY = true;
                        pathTracingUniforms.tHDRTexture = { type: "t", value: hdrTexture };
                        lock = true;
                });
        };

        // HDRI exposure slider.
        document.getElementById("HDRI_Exposure").oninput = function () {
                renderer.toneMappingExposure = Number(this.value);
                pathTracingUniforms.uHDRI_Exposure.value = Number(this.value);
                HDRI_ExposureValue = this.value;
                lock = true;
        };

        // Screenshot button.
        document.getElementById("bt1").onclick = function () {
                saveAsImage('test');
        };

        // Sphere size slider.
        document.getElementById("SphereSize").oninput = function () {
                pathTracingUniforms.uSphereSize.value = Number(this.value);
                SphereSize = this.value;
                lock = true;
        };

        // Depth-of-field (aperture) slider.
        // BUG FIX: this.value is a string; the original assigned it directly to
        // apertureSize, so apertureSize.toFixed(2) in the info readout threw a
        // TypeError and `apertureSize += 0.1` would have concatenated strings.
        document.getElementById("apertureSize").oninput = function () {
                apertureSize = Number(this.value);
                pathTracingUniforms.uApertureSize.value = apertureSize;
                cameraIsMoving = true;
                decreaseAperture = true; // guarantees the reset survives the per-frame flag clear
        };
}

// Per-frame driver: polls input, updates path-tracing uniforms, and performs
// the 3-pass progressive render (trace -> copy -> tonemapped output).
function animate() {

        frameTime = clock.getDelta();

        elapsedTime = clock.getElapsedTime() % 1000;

        // Bind the DOM UI handlers on the first frame only (was: every frame).
        if (!uiHandlersBound) {
                bindUIHandlers();
                uiHandlersBound = true;
        }

        // reset flags
        cameraIsMoving = false;

        if (windowIsBeingResized) {
                cameraIsMoving = true;
                windowIsBeingResized = false;
        }

        // check user controls
        if (mouseControl) {
                // movement detected when this frame's yaw/pitch differ from last frame's
                if (oldYawRotation != cameraControlsYawObject.rotation.y ||
                    oldPitchRotation != cameraControlsPitchObject.rotation.x) {

                        cameraIsMoving = true;
                }

                // save state for next frame
                oldYawRotation = cameraControlsYawObject.rotation.y;
                oldPitchRotation = cameraControlsPitchObject.rotation.x;

        } // end if (mouseControl)

        // this gives us a vector in the direction that the camera is pointing,
        // which will be useful for moving the camera 'forward' and shooting projectiles in that direction
        controls.getDirection(cameraDirectionVector);
        cameraDirectionVector.normalize();
        controls.getUpVector(cameraUpVector);
        cameraUpVector.normalize();
        controls.getRightVector(cameraRightVector);
        cameraRightVector.normalize();

        // the following gives us a rotation quaternion (4D vector), which will be useful for
        // rotating scene objects to match the camera's rotation
        worldCamera.getWorldQuaternion(cameraWorldQuaternion);

        if (useGenericInput) {

                // allow flying camera: W/S forward-back, A/D strafe, Q/Z up-down
                if ((keyboard.pressed('W')) && !(keyboard.pressed('S'))) {

                        cameraControlsObject.position.add(cameraDirectionVector.multiplyScalar(camFlightSpeed * frameTime));
                        cameraIsMoving = true;
                }
                if ((keyboard.pressed('S')) && !(keyboard.pressed('W'))) {

                        cameraControlsObject.position.sub(cameraDirectionVector.multiplyScalar(camFlightSpeed * frameTime));
                        cameraIsMoving = true;
                }
                if ((keyboard.pressed('A')) && !(keyboard.pressed('D'))) {

                        cameraControlsObject.position.sub(cameraRightVector.multiplyScalar(camFlightSpeed * frameTime));
                        cameraIsMoving = true;
                }
                if ((keyboard.pressed('D')) && !(keyboard.pressed('A'))) {

                        cameraControlsObject.position.add(cameraRightVector.multiplyScalar(camFlightSpeed * frameTime));
                        cameraIsMoving = true;
                }
                if (keyboard.pressed('Q') && !keyboard.pressed('Z')) {

                        cameraControlsObject.position.add(cameraUpVector.multiplyScalar(camFlightSpeed * frameTime));
                        cameraIsMoving = true;
                }
                if (keyboard.pressed('Z') && !keyboard.pressed('Q')) {

                        cameraControlsObject.position.sub(cameraUpVector.multiplyScalar(camFlightSpeed * frameTime));
                        cameraIsMoving = true;
                }
                // arrow keys: up/down adjust focus distance, right/left adjust aperture
                if ((keyboard.pressed('up')) && !(keyboard.pressed('down'))) {

                        increaseFocusDist = true;
                }
                if ((keyboard.pressed('down')) && !(keyboard.pressed('up'))) {

                        decreaseFocusDist = true;
                }
                if (keyboard.pressed('right') && !keyboard.pressed('left')) {

                        increaseAperture = true;
                }
                if (keyboard.pressed('left') && !keyboard.pressed('right')) {

                        decreaseAperture = true;
                }

                if (increaseFOV) {
                        worldCamera.fov++;
                        if (worldCamera.fov > 150)
                                worldCamera.fov = 150;
                        updateFOVUniforms();

                        cameraIsMoving = true;
                        increaseFOV = false;
                }
                if (decreaseFOV) {
                        worldCamera.fov--;
                        if (worldCamera.fov < 1)
                                worldCamera.fov = 1;
                        updateFOVUniforms();

                        cameraIsMoving = true;
                        decreaseFOV = false;
                }

                if (increaseFocusDist) {
                        focusDistance += 1;
                        pathTracingUniforms.uFocusDistance.value = focusDistance;
                        cameraIsMoving = true;
                        increaseFocusDist = false;
                }
                if (decreaseFocusDist) {
                        focusDistance -= 1;
                        if (focusDistance < 1)
                                focusDistance = 1;
                        pathTracingUniforms.uFocusDistance.value = focusDistance;
                        cameraIsMoving = true;
                        decreaseFocusDist = false;
                }

                if (increaseAperture) {
                        apertureSize += 0.1;
                        if (apertureSize > 100.0)
                                apertureSize = 100.0;
                        pathTracingUniforms.uApertureSize.value = apertureSize;
                        cameraIsMoving = true;
                        increaseAperture = false;
                }
                if (decreaseAperture) {
                        apertureSize -= 0.1;
                        if (apertureSize < 0.0)
                                apertureSize = 0.0;
                        pathTracingUniforms.uApertureSize.value = apertureSize;
                        cameraIsMoving = true;
                        decreaseAperture = false;
                }
                // `lock` is set by UI handlers to force an accumulation restart
                if (lock) {
                        cameraIsMoving = true;
                        lock = false;
                }

        } // end if (useGenericInput)

        // INFO readout (FOV / aperture / focus distance / sample count)
        document.getElementById('cameraInfo').innerHTML = "FOV: " + worldCamera.fov + " / 景深: " + apertureSize.toFixed(2) +
            "焦距: " + focusDistance + "<br>" + "采样: " + sampleCounter;

        // now update uniforms that are common to all scenes
        if (!cameraIsMoving)
        {
                if (sceneIsDynamic)
                        sampleCounter = 1.0; // reset for continuous updating of image
                else sampleCounter += 1.0; // for progressive refinement of image

                frameCounter += 1.0;

                cameraRecentlyMoving = false;
        }

        if (cameraIsMoving)
        {
                sampleCounter = 1.0;
                frameCounter += 1.0;

                if (!cameraRecentlyMoving)
                {
                        frameCounter = 1.0;
                        cameraRecentlyMoving = true;
                }
        }

        pathTracingUniforms.uTime.value = elapsedTime;
        pathTracingUniforms.uCameraIsMoving.value = cameraIsMoving;
        pathTracingUniforms.uSampleCounter.value = sampleCounter;
        pathTracingUniforms.uFrameCounter.value = frameCounter;
        pathTracingUniforms.uRandomVec2.value.set(Math.random(), Math.random());

        // CAMERA
        cameraControlsObject.updateMatrixWorld(true);
        worldCamera.updateMatrixWorld(true);
        pathTracingUniforms.uCameraMatrix.value.copy(worldCamera.matrixWorld);

        // PROGRESSIVE SAMPLE WEIGHT (reduces intensity of each successive animation frame's image)
        screenOutputUniforms.uOneOverSampleCounter.value = 1.0 / sampleCounter;


        // RENDERING in 3 steps

        // STEP 1
        // Perform PathTracing and Render(save) into pathTracingRenderTarget, a full-screen texture.
        // Read previous screenCopyRenderTarget(via texelFetch inside fragment shader) to use as a new starting point to blend with
        renderer.setRenderTarget(pathTracingRenderTarget);
        renderer.render(pathTracingScene, worldCamera);

        // STEP 2
        // Render(copy) the pathTracingScene output(pathTracingRenderTarget above) into screenCopyRenderTarget.
        // This will be used as a new starting point for Step 1 above (essentially creating ping-pong buffers)
        renderer.setRenderTarget(screenCopyRenderTarget);
        renderer.render(screenCopyScene, quadCamera);

        // STEP 3
        // Render full screen quad with generated pathTracingRenderTarget in STEP 1 above.
        // After applying tonemapping and gamma-correction to the image, it will be shown on the screen as the final accumulated output
        renderer.setRenderTarget(null);
        renderer.render(screenOutputScene, quadCamera);

        requestAnimationFrame(animate);

} // end function animate()
/**
 * Renders the current accumulated output and triggers a PNG download of it.
 * @param {string} filename - download file name suggested to the browser.
 */
function saveAsImage(filename) {
        // Re-render immediately before reading pixels so the canvas backing
        // store holds the latest frame (toDataURL can otherwise read a cleared
        // buffer when preserveDrawingBuffer is false).
        renderer.render(screenOutputScene, quadCamera);
        const saveLink = document.createElementNS('http://www.w3.org/1999/xhtml', 'a');
        saveLink.href = renderer.domElement.toDataURL('image/png');
        saveLink.download = filename;
        // Replaces the deprecated document.createEvent/initMouseEvent pair;
        // HTMLElement.click() fires a synthetic click on the anchor directly.
        saveLink.click();
}
/**
 * (Re)loads an HDR environment map selected by index from the hdrPath list.
 * @param {number} size - index into hdrPath choosing which HDR file to load.
 * @param {*} path - unused; kept so existing call sites remain valid.
 */
function hdrChange(size, path) {
        hdrLoader = new THREE.RGBELoader();
        // BUG FIX: the default paths were pushed unconditionally, so every call
        // appended three duplicate entries, shifting what `size` referred to and
        // inflating the HDRI_Change slider's range. Seed the list only once.
        if (hdrPath.length === 0) {
                hdrPath.push(
                        'textures/symmetrical_garden_2k.hdr',
                        'textures/cloud_layers_2k.hdr',
                        'textures/daytime.hdr'
                );
        }
        hdrTexture = hdrLoader.load(hdrPath[size], function (texture, textureData) {
                texture.encoding = THREE.LinearEncoding;
                texture.minFilter = THREE.LinearFilter;
                texture.magFilter = THREE.LinearFilter;
                texture.generateMipmaps = false;
                texture.flipY = true;
        });
}
