
import * as THREE from 'three';
import { GLTFLoader, OrbitControls } from 'three/examples/jsm/Addons.js';
import * as FLD from '@tensorflow-models/face-landmarks-detection';
import { Utils } from './Utils';
import { BarControls } from './BarControls';

// NOTE(review): currently unused — only referenced from commented-out debug
// code in updateFace()/renderFace(); consider removing if it stays unused.
const loader = new THREE.ImageLoader();

// Flat triangle index list for the face mesh; filled by ModelViewer.initMap()
// and consumed by geometry.setIndex() in renderFace(). Module-level, so it is
// shared across all ModelViewer instances.
const triangles = Array<number>();
// Key directional light; position is (re)assigned every frame in render().
const light = new THREE.DirectionalLight(0xffffff, 1.5);

// Single shared BufferGeometry backing the face mesh. renderFace() creates the
// mesh from it once; updateFace() rewrites its attributes each frame.
const geometry = new THREE.BufferGeometry();

export class ModelViewer {
    renderer: THREE.WebGLRenderer;
    camera: THREE.PerspectiveCamera;
    scene: THREE.Scene;
    controls: OrbitControls;
    barControls: BarControls;
    detector?: FLD.FaceLandmarksDetector;
    stream: MediaStream | null = null;

    // Set once renderFace() has built the face mesh and added it to the scene;
    // after that, updateFace() only refreshes the shared geometry's attributes.
    created = false;

    // Guards against overlapping estimateFaces() calls when render() fires
    // faster than a detection round-trip completes.
    private updating = false;

    /**
     * Sets up renderer, camera, scene and controls on the given canvas.
     * Detection is not available until init() has resolved; drawing is driven
     * externally by calling render() from an animation loop.
     */
    constructor(private canvas: HTMLCanvasElement) {
        // Cap device pixel ratio at 2 to bound the backing-store size on hi-dpi screens.
        const dpr = Math.min(2, window.devicePixelRatio);
        const renderer = new THREE.WebGLRenderer({ canvas: canvas, antialias: true });
        this.renderer = renderer;
        renderer.setClearColor(0xd3d3d3, 1);
        renderer.setPixelRatio(dpr);
        // Manual clearing: render() draws the main scene, then the bar overlay
        // on top without an intermediate clear.
        renderer.autoClear = false;

        const aspect = canvas.width / canvas.height;
        const camera = new THREE.PerspectiveCamera(75, aspect, 0.1, 10000);
        camera.position.z = 10;
        camera.position.y = 5;
        this.camera = camera;
        const scene = new THREE.Scene();
        this.scene = scene;
        camera.lookAt(scene.position);
        this.controls = new OrbitControls(camera, renderer.domElement);

        this.barControls = new BarControls(renderer.domElement);

        this.resize();
    }

    /**
     * Adds lights, builds the triangle index table and loads the MediaPipe
     * face-landmarks detector. Must complete before renderFace() can detect.
     */
    async init() {
        const scene = this.scene;
        scene.add(new THREE.HemisphereLight(0xaaaaaa, 0x444444, 1));
        light.position.set(1000, 1000, 1000);
        scene.add(light);
        this.scene.add(this.barControls.selected);
        this.initMap();

        const model = FLD.SupportedModels.MediaPipeFaceMesh;
        const detectorConfig: any = {
            runtime: 'mediapipe', // or 'tfjs'
            // Solution files are served locally instead of from the CDN
            // ('https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh').
            solutionPath: '/face_mesh',
        };
        this.detector = await FLD.createDetector(model, detectorConfig);
    }

    /**
     * Builds the module-level `triangles` index list from the model's adjacent
     * keypoint pairs: for each vertex a with neighbours b and c that are
     * themselves adjacent, a triangle over (a, b, c) is emitted.
     */
    initMap() {
        // Rebuild from scratch so repeated init() calls don't duplicate indices.
        triangles.length = 0;

        const pairs = FLD.util.getAdjacentPairs(FLD.SupportedModels.MediaPipeFaceMesh);

        // adjacency: vertex index -> list of directly connected vertex indices
        const adjacency = new Map<number, number[]>();
        for (const [a, b] of pairs) {
            let neighbours = adjacency.get(a);
            if (!neighbours) {
                neighbours = [];
                adjacency.set(a, neighbours);
            }
            neighbours.push(b);
        }

        adjacency.forEach((neighbours, a) => {
            for (let i = 0; i < neighbours.length - 1; i++) {
                for (let j = i + 1; j < neighbours.length; j++) {
                    const b = neighbours[i];
                    const c = neighbours[j];
                    // NOTE(review): when b and c are mutually adjacent this pushes
                    // BOTH windings (a,b,c) and (a,c,b); presumably intentional so
                    // faces render from either side — confirm. Preserved as-is.
                    if (adjacency.get(b)?.includes(c)) {
                        triangles.push(a, b, c);
                    }
                    if (adjacency.get(c)?.includes(b)) {
                        triangles.push(a, c, b);
                    }
                }
            }
        });
    }

    /** Opens the rear ("environment") camera and streams it into #video. */
    openBackCamera() {
        this.openCamera({ facingMode: { exact: "environment" } });
    }

    /**
     * Opens the front ("user") camera and streams it into #video.
     * (Method name misspells "Front" but is kept for caller compatibility.)
     */
    openFontCamera() {
        this.openCamera({ facingMode: "user" });
    }

    // Shared getUserMedia plumbing for both camera-facing modes.
    private openCamera(videoConstraints: MediaTrackConstraints) {
        const video = document.getElementById("video") as HTMLVideoElement | null;
        this.closeCamera();
        if (!video || !navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
            return;
        }
        // Not adding `{ audio: true }` since we only want video.
        navigator.mediaDevices.getUserMedia({ video: videoConstraints })
            .then((stream) => {
                this.stream = stream;
                video.srcObject = stream;
                video.play();
            })
            .catch((e: unknown) => {
                // e.g. OverconstrainedError when the device has no camera for the
                // requested facing mode, or NotAllowedError on permission denial.
                console.error('getUserMedia failed', e);
            });
    }

    /** Stops and detaches every video track of the current stream, if any. */
    closeCamera() {
        const stream = this.stream;
        if (!stream) {
            return;
        }
        for (const track of stream.getVideoTracks()) {
            try {
                stream.removeTrack(track);
                track.stop();
            } catch (e: unknown) {
                console.error(e);
            }
        }
        // Bug fix: clear the reference so updateFace()/renderFace() stop
        // treating the closed stream as live.
        this.stream = null;
    }

    /** Resizes the canvas, camera aspect and renderer to the current window. */
    resize() {
        const canvas = this.canvas;
        canvas.width = window.innerWidth;
        canvas.height = window.innerHeight;

        this.camera.aspect = canvas.width / canvas.height;
        this.camera.updateProjectionMatrix();
        // setSize also resizes the canvas backing store (scaled by pixel ratio).
        this.renderer.setSize(canvas.width, canvas.height);
    }

    // Flattens one face's keypoints into flat position/uv attribute arrays.
    // UVs are random placeholders — no real texture mapping exists yet.
    private static flattenKeypoints(keypoints: Array<{ x: number; y: number; z?: number }>) {
        const position: number[] = [];
        const uv: number[] = [];
        for (const pt of keypoints) {
            position.push(pt.x, pt.y, pt.z ?? 0);
            uv.push(Math.random(), Math.random());
        }
        return { position, uv };
    }

    /**
     * Refreshes the face mesh geometry from the latest video frame.
     * No-op until renderFace() has created the mesh, and while a previous
     * detection is still in flight.
     */
    async updateFace() {
        if (!this.detector || !this.stream || !this.created || this.updating) {
            return;
        }
        this.updating = true;
        try {
            const video = document.getElementById('video') as HTMLVideoElement;
            const faces = await this.detector.estimateFaces(video);
            if (faces.length === 0) {
                return;
            }
            const { position, uv } = ModelViewer.flattenKeypoints(faces[0].keypoints);
            geometry.setAttribute('position', new THREE.Float32BufferAttribute(position, 3));
            geometry.setAttribute('uv', new THREE.Float32BufferAttribute(uv, 2));
            geometry.attributes.position.needsUpdate = true;
            geometry.attributes.uv.needsUpdate = true;
        } finally {
            this.updating = false;
        }
    }

    /**
     * Detects a face in #video, builds an indexed mesh from the landmarks,
     * normalizes it to unit size and adds it to the scene. Each successful
     * call adds a new mesh object (they all share the module-level geometry).
     */
    async renderFace() {
        if (!this.detector || !this.stream) {
            return;
        }
        const video = document.getElementById('video') as HTMLVideoElement;
        const faces = await this.detector.estimateFaces(video);
        if (faces.length === 0) {
            return;
        }

        const { position, uv } = ModelViewer.flattenKeypoints(faces[0].keypoints);
        geometry.setIndex(triangles);
        geometry.setAttribute('position', new THREE.Float32BufferAttribute(position, 3));
        geometry.setAttribute('uv', new THREE.Float32BufferAttribute(uv, 2));
        geometry.computeVertexNormals();

        const material = new THREE.MeshStandardMaterial({ color: 0xFEDCBD });
        const mesh = new THREE.Mesh(geometry, material);

        // Normalize: scale so the largest dimension is 1, then recenter.
        const box = Utils.computeBoundingBox(mesh);
        const size = new THREE.Vector3();
        box.getSize(size);
        const center = new THREE.Vector3();
        box.getCenter(center);

        const r = Math.max(size.x, size.y, size.z);
        mesh.scale.divideScalar(r);
        center.divideScalar(r);

        // NOTE(review): x is negated but y is not (likely because of the PI
        // rotation flipping the y-down landmark space) — verify the centering
        // is intended rather than `-center.y`. Preserved as-is.
        mesh.position.x = -center.x;
        mesh.position.y = center.y;
        mesh.rotateX(Math.PI);

        this.scene.add(mesh);
        this.created = true;
    }

    /**
     * Per-frame draw: main scene first, then the bar overlay on top
     * (autoClear is off, so the overlay does not erase the scene).
     * @param dt frame delta — currently unused.
     */
    render(dt: number) {
        // NOTE(review): Math.sin(Math.PI) is a constant (~0), so the light never
        // moves — was this meant to animate with `dt`? Preserved as-is.
        light.position.x = Math.sin(Math.PI) * 1000;
        // Fire-and-forget: face tracking runs async alongside drawing.
        this.updateFace().catch((e: unknown) => console.error(e));
        this.renderer.clear();
        this.renderer.render(this.scene, this.camera);
        const bar = this.barControls;
        this.renderer.render(bar.sceneBar, bar._camera);
    }
}

