<template>
  <!-- Camera picker + live preview area -->
  <div class="container-fluid px-0">
    <div class="d-flex flex-column align-items-center justify-content-center">
      <!-- Device picker: populated by the WebCam `cameras` event, bound to deviceId -->
      <div class="p-3 w-100">
        <select class="form-control" v-model="deviceId">
          <option value="" disabled>-- 选择相机 --</option>
          <option
            v-for="camera in cameras"
            :key="camera.deviceId"
            :value="camera.deviceId"
          >
            {{ camera.label }}
          </option>
        </select>
      </div>
      <!-- Camera/getUserMedia error surface -->
      <div class="p-3 text-danger" v-if="errorMessage">{{ errorMessage }}</div>
      <!-- Live preview; a face-api canvas is appended to .overlay at runtime (see onVideoLive) -->
      <div class="mx-3 overlay d-flex align-items-center justify-content-center">
        <WebCam
          id="webcam"
          ref="webcam"
          width="100%"
          height="100%"
          :deviceId="deviceId"
          @cameras="onCameras"
          @camera-change="onCameraChange"
          @error="onError"
          @notsupported="onError"
          @video-live="onVideoLive"
        />
      </div>
    </div>
  </div>
  <!-- Bootstrap toast shown when a face is recognized (see onVideoLive) -->
  <div
    aria-live="polite"
    aria-atomic="true"
    class="d-flex justify-content-center align-items-center w-100"
  >
    <div
      class="toast align-items-center"
      role="alert"
      aria-live="assertive"
      aria-atomic="true"
      ref="toastRef"
    >
      <div class="toast-body">{{ toastMessage }}</div>
    </div>
  </div>
</template>

<script setup>
  import { getCurrentInstance, onMounted, watch } from 'vue'
  import { useMainStore } from 'stores';
  import * as faceapi from '@vladmandic/face-api';
  import WebCam from 'components/WebCam.vue';
  import User from 'apis/user';
  import bootstrap from 'bootstrap/dist/js/bootstrap.bundle.min';

  const mainStore = useMainStore();

  // Bootstrap Toast instance, created in onMounted from toastRef.
  let toast = null;
  // Template ref for the toast element.
  let toastRef = $ref(null);
  // Message rendered inside the toast body.
  let toastMessage = $ref('');
  // Template ref for the <WebCam> component.
  let webcam = $ref(null);
  // Currently selected camera device id (v-model on the <select>).
  let deviceId = $ref("");
  // Available video input devices, emitted by <WebCam> via `cameras`.
  let cameras = $ref([]);
  // Error text shown in the template when the camera fails.
  let errorMessage = $ref("");
  // NOTE(review): declared but never read/written elsewhere in this file.
  let spinner = $ref(false);
  // One-shot flags so models/database are only loaded once.
  let loadedModels = false;
  let boundDatabase= false;
  // User records fetched from the backend; FaceDescriptors parsed in bindDatabase.
  let users = $ref({});

  /**
   * Handler for the WebCam `cameras` event: cache the available devices.
   * Fix: guard against an empty device list (previously `camerasList[0]`
   * would throw), and only apply the first device as a default when the
   * user has not already picked one — the event can re-fire on device
   * plug/unplug and must not clobber an active selection.
   */
  function onCameras(camerasList) {
    cameras = camerasList;
    if (camerasList.length > 0 && !deviceId) {
      deviceId = camerasList[0].deviceId;
    }
  }

  /**
   * WebCam is switching devices: show the global loading overlay until the
   * new stream reports `video-live`.
   */
  function onCameraChange() {
    mainStore.setLoadingMsg('相机启动中');
    mainStore.setLoadingStatus(true);
  }

  /**
   * Shared handler for the WebCam `error` and `notsupported` events:
   * hide the loading overlay and surface the error in the template.
   */
  function onError(error) {
    mainStore.setLoadingStatus(false);
    errorMessage = error;
  }

  /**
   * Fired by <WebCam> once the stream is live (and re-run by the `users`
   * watcher). Lazily loads models and the user database, (re)builds the
   * face-api overlay canvas, then starts the detection/recognition loop.
   */
  async function onVideoLive() {
    if (!loadedModels) await loadModels();
    if (!boundDatabase) await bindDatabase();

    // Single source of truth for the match threshold (was duplicated 3x).
    const MATCH_THRESHOLD = 0.35;

    // Renamed from `webcam` to avoid shadowing the file-level template ref.
    const videoEl = document.querySelector('#webcam');
    videoEl.setAttribute('width', '100%');
    videoEl.setAttribute('height', '100%');

    // Rebuild the overlay canvas, dropping a stale one from a previous run.
    const staleCanvas = document.querySelector('canvas');
    const canvas = faceapi.createCanvasFromMedia(videoEl);
    const canvasSize = { width: videoEl.clientWidth, height: videoEl.clientHeight };
    faceapi.matchDimensions(canvas, canvasSize);
    if (staleCanvas) document.querySelector('.overlay').removeChild(staleCanvas);
    document.querySelector('.overlay').appendChild(canvas);

    // Build labeled descriptors once per (re)start of the loop.
    const labels = loadLabels();
    // Close the spinner/loading overlay.
    mainStore.setLoadingStatus(false);

    // Fix: this function can run more than once (camera change, users
    // watcher); clear any previous loop so intervals don't accumulate.
    if (onVideoLive._timer) clearInterval(onVideoLive._timer);
    onVideoLive._timer = setInterval(async () => {
      const detections = await faceapi
        .detectAllFaces(videoEl, new faceapi.TinyFaceDetectorOptions())
        .withFaceLandmarks()
        .withFaceDescriptors();
      const resizeDetections = faceapi.resizeResults(detections, canvasSize);
      canvas.getContext('2d').clearRect(0, 0, canvasSize.width, canvasSize.height);

      // Recognition: match each detected face against known descriptors.
      const distanceArray = [];
      if (labels.length > 0) {
        const faceMatcher = new faceapi.FaceMatcher(labels, MATCH_THRESHOLD);
        const results = resizeDetections.map((d) => faceMatcher.findBestMatch(d.descriptor));
        results.forEach(async (result, index) => {
          const { label, distance } = result;
          distanceArray[index] = distance;
          const message = `『${label}』 打卡成功!`;
          // Toast/record only on a confident match, and only once per person
          // until a different person is matched (message dedup).
          if (distance < MATCH_THRESHOLD && toastMessage !== message) {
            toastMessage = message;
            toast.show();
            await attendance(label);
          }
        });
      }

      // Draw one box per face: green when recognized, grey otherwise.
      resizeDetections.forEach((detection, index) => {
        const { box } = detection.detection;
        new faceapi.draw.DrawBox(
          { x: box.x, y: box.y, width: box.width, height: box.height },
          // Fix: `distanceArray[index] || 1` treated a perfect-match
          // distance of 0 as "no match"; `??` only falls back on undefined.
          { boxColor: (distanceArray[index] ?? 1) < MATCH_THRESHOLD ? '#20c997' : '#6c757d' },
        ).draw(canvas);
      });
    }, 300);
  }

  /**
   * Fetch all registered users from the backend and cache them in `users`,
   * hydrating each row's JSON-serialized face descriptors.
   * Fixes: dropped the mixed `await ... .then()` pattern, removed a leftover
   * debug `console.log`, and removed the redundant `return Promise.resolve()`
   * (an async function already resolves on return).
   */
  async function bindDatabase() {
    mainStore.setLoadingMsg('用户数据同步中');
    const result = await User.getAllUser();
    result.rows.forEach((row) => {
      // Descriptors are stored as JSON strings; parse them once here.
      row.FaceDescriptors = JSON.parse(row.FaceDescriptors);
    });
    users = result.rows;
    boundDatabase = true;
  }

  /**
   * Load all face-api model weights in parallel from the public /models
   * folder. Fix: removed the redundant `return Promise.resolve()` — an
   * async function already resolves with undefined on return.
   */
  async function loadModels() {
    mainStore.setLoadingMsg('模型载入中');
    await Promise.all([
      faceapi.nets.faceLandmark68Net.loadFromUri('/models'),
      faceapi.nets.faceRecognitionNet.loadFromUri('/models'),
      faceapi.nets.ssdMobilenetv1.loadFromUri('/models'),
      faceapi.nets.tinyFaceDetector.loadFromUri('/models'),
    ]);
    loadedModels = true;
  }

  /**
   * Convert the cached `users` into LabeledFaceDescriptors for FaceMatcher.
   * Generalization: the sample count was hard-coded to 5, which threw for
   * users with fewer descriptors and silently dropped extras; now every
   * descriptor on record is used.
   */
  function loadLabels() {
    return Object.values(users).map((user) => {
      const descriptions = Object.values(user.FaceDescriptors).map(
        (descriptor) => Float32Array.from(Object.values(descriptor)),
      );
      return new faceapi.LabeledFaceDescriptors(user.UserName, descriptions);
    });
  }

  /**
   * Record an attendance punch for the matched user.
   * Fix: removed a leftover debug `console.log`.
   * @param {string} username - label returned by the face matcher
   */
  async function attendance(username) {
    await User.attendance({
      UserName: username,
    });
  }
  // Re-run the detection pipeline whenever the user database is replaced,
  // but only after the initial bind has completed (onVideoLive handles the
  // first run itself). Fixes: removed the dead commented-out Options-API
  // watcher and the unused (newValue, oldValue) parameters.
  watch(() => users, () => {
    if (!boundDatabase) return;
    onVideoLive();
  });

  // Component startup: wire the Bootstrap toast to its template element and
  // show the global loading overlay while the camera spins up.
  onMounted(() => {
    toast = new bootstrap.Toast(toastRef);
    mainStore.setLoadingMsg('相机启动中');
    mainStore.setLoadingStatus(true);
  });
</script>

<style lang="scss" scoped>
  /* Spacing between the toast and the content above it. */
  .toast {
    margin-top: 30px;
  }
</style>
