<!--
*@author 白晴语
*@date 2023-10-12 10:48
*@description:
-->
<template>
  <div style="width: 50%">

    <!-- Top menu. NOTE: the original bound :default-active="activeIndex" and
         @select="handleSelect", but neither property exists in the component,
         causing Vue warnings and a TypeError when a select event fired. -->
    <el-menu
        class="el-menu-demo"
        mode="horizontal"
    >
      <el-menu-item index="1" @click="$refs.RCB.isShow = true">设置</el-menu-item>
      <el-menu-item index="3" @click="fnOpen">打开摄像头</el-menu-item>
      <el-menu-item index="4" @click="fnClose">关闭摄像头</el-menu-item>
      <!-- Single item with a dynamic disabled flag replaces the duplicated v-if pair;
           it stays disabled until the camera has been opened. -->
      <el-menu-item index="2" :disabled="OpenFaceExpression === 0" @click="getFaceExpression">获取表情特征值</el-menu-item>
    </el-menu>
    <!-- Camera viewport: the canvas is absolutely positioned over the video so
         detection results are drawn on top of the live feed. -->
    <div class="see" style="margin: auto;width: 500px; position: relative; display: inline-block;">
      <video
          id="myVideo"
          poster="https://dummyimage.com/500X500"
          muted
          loop
          playsinline
          @loadedmetadata="fnRun"
      ></video>
      <canvas id="myCanvas" style="position: absolute; left: 0; top: 0;"></canvas>
    </div>
  </div>
  <div style="width: 50%">

  </div>

  <!-- Sidebar: emits ruleListener with [value, ruleName] pairs. -->
  <RightControBar ref="RCB" @ruleListener="updateRule"></RightControBar>
</template>
<script>
import { ElMessage } from 'element-plus'
import { toRaw } from '@vue/reactivity'
import RightControBar from "./RightControBar.vue";
import * as faceapi from "face-api.js";
import {Camera, CameraFilled} from "@element-plus/icons-vue";

export default {
  name: "FaceDetect",
  components: {CameraFilled, Camera, RightControBar},
  props: {},
  data() {
    return {
      // 1 once the camera has been opened; gates the "get expression" menu item.
      OpenFaceExpression:0,
      // face-api arg
      // Latest (resized) detection result from the expression loop; null until one exists.
      faceExpression: null,
      nets: "tinyFaceDetector", // detection algorithm model name
      options: null, // model options instance, built in fnInit
      withBoxes: true, // true: draw bounding boxes; false: draw face landmarks/contours
      detectFace: "detectSingleFace", // single- vs. multi-face detection API name
      detection: "landmark", // active mode: landmark | expression | age_gender
      videoEl: null,  // <video id="myVideo"> node, cached in fnInit
      canvasEl: null, // <canvas id="myCanvas"> node, cached in fnInit
      timeout: 0,     // timer id of the currently scheduled detection step
      // Media-stream constraints passed to getUserMedia
      constraints: {
        audio: false,
        video: {
          // "ideal": the value the browser should try to honor
          width: {
            ideal: 500,
          },
          height: {
            ideal: 500,
          },
          // frameRate: a lower frame rate may be preferable on constrained bandwidth
          frameRate: {
            min: 15,
            ideal: 30,
            max: 60,
          },
          // camera facing mode (front/back); "environment" selects the rear camera
          facingMode: "environment",
        },
      },
    }
  },
  methods: {
    updateRule(val) {
      let ruleVal = val[0]
      let ruleName = val[1]
      if (ruleName === 'withBoxes') {
        switch (ruleVal) {
          case '1':
            this[ruleName] = true
            break;
          case '0':
            this[ruleName] = false
            break;
        }
      } else {
        this[ruleName] = ruleVal
      }
    },

    getFaceExpression() {
      ElMessage({
        message: "获取成功",
        type:'success'
      })
      console.log(this.faceExpression)
      console.log(toRaw(this.faceExpression).expressions)
      this.$store.commit('getFaceExpression',toRaw(this.faceExpression).expressions)
    },
    // 初始化模型加载
    async fnInit() {
      await faceapi.nets[this.nets].loadFromUri("/models"); // 算法模型
      await faceapi.loadFaceLandmarkModel("/models"); // 轮廓模型
      await faceapi.loadFaceExpressionModel("/models"); // 表情模型
      await faceapi.loadAgeGenderModel("/models"); // 年龄模型
      // 根据算法模型参数识别调整结果
      switch (this.nets) {
        case "ssdMobilenetv1":
          this.options = new faceapi.SsdMobilenetv1Options({
            minConfidence: 0.5, // 0.1 ~ 0.9
          });
          break;
        case "tinyFaceDetector":
          this.options = new faceapi.TinyFaceDetectorOptions({
            inputSize: 512, // 160 224 320 416 512 608
            scoreThreshold: 0.5, // 0.1 ~ 0.9
          });
          break;
        case "mtcnn":
          this.options = new faceapi.MtcnnOptions({
            minFaceSize: 20, // 0.1 ~ 0.9
            scaleFactor: 0.709, // 0.1 ~ 0.9
          });
          break;
      }
      // 节点属性化
      this.videoEl = document.getElementById("myVideo");
      this.canvasEl = document.getElementById("myCanvas");
    },
    // 人脸面部勘探轮廓识别绘制
    async fnRunFaceLandmark() {
      console.log("RunFaceLandmark");
      if (this.videoEl.paused) return clearTimeout(this.timeout);
      // 识别绘制人脸信息
      const result = await faceapi[this.detectFace](
          this.videoEl,
          this.options
      ).withFaceLandmarks();
      if (result && !this.videoEl.paused) {
        const dims = faceapi.matchDimensions(this.canvasEl, this.videoEl, true);
        const resizeResult = faceapi.resizeResults(result, dims);
        this.withBoxes
            ? faceapi.draw.drawDetections(this.canvasEl, resizeResult)
            : faceapi.draw.drawFaceLandmarks(this.canvasEl, resizeResult);
      } else {
        this.canvasEl
            .getContext("2d")
            .clearRect(0, 0, this.canvasEl.width, this.canvasEl.height);
      }
      this.timeout = setTimeout(() => this.fnRunFaceLandmark());
    },
    // 人脸表情识别绘制
    async fnRunFaceExpression() {
      console.log("RunFaceExpression");
      if (this.videoEl.paused) return clearTimeout(this.timeout);
      // 识别绘制人脸信息
      const result = await faceapi[this.detectFace](this.videoEl, this.options)
          .withFaceLandmarks()
          .withFaceExpressions();
      if (result && !this.videoEl.paused) {
        // this.faceExpression = result;
        const dims = faceapi.matchDimensions(this.canvasEl, this.videoEl, true);
        const resizeResult = faceapi.resizeResults(result, dims);
        this.faceExpression = resizeResult
        this.withBoxes
            ? faceapi.draw.drawDetections(this.canvasEl, resizeResult)
            : faceapi.draw.drawFaceLandmarks(this.canvasEl, resizeResult);
        faceapi.draw.drawFaceExpressions(this.canvasEl, resizeResult, 0.05);

      } else {
        this.canvasEl
            .getContext("2d")
            .clearRect(0, 0, this.canvasEl.width, this.canvasEl.height);
      }
      this.timeout = setTimeout(() => this.fnRunFaceExpression());
    },
    // 年龄性别识别绘制
    async fnRunFaceAgeAndGender() {
      console.log("RunFaceAgeAndGender");
      if (this.videoEl.paused) return clearTimeout(this.timeout);
      // 识别绘制人脸信息
      const result = await faceapi[this.detectFace](this.videoEl, this.options)
          .withFaceLandmarks()
          .withAgeAndGender();
      if (result && !this.videoEl.paused) {
        const dims = faceapi.matchDimensions(this.canvasEl, this.videoEl, true);
        const resizeResults = faceapi.resizeResults(result, dims);
        this.withBoxes
            ? faceapi.draw.drawDetections(this.canvasEl, resizeResults)
            : faceapi.draw.drawFaceLandmarks(this.canvasEl, resizeResults);
        if (Array.isArray(resizeResults)) {
          resizeResults.forEach((result) => {
            const {age, gender, genderProbability} = result;
            new faceapi.draw.DrawTextField(
                [
                  `${Math.round(age, 0)} years`,
                  `${gender} (${Math.round(genderProbability)})`,
                ],
                result.detection.box.bottomLeft
            ).draw(this.canvasEl);
          });
        } else {
          const {age, gender, genderProbability} = resizeResults;
          new faceapi.draw.DrawTextField(
              [
                `${Math.round(age, 0)} years`,
                `${gender} (${Math.round(genderProbability)})`,
              ],
              resizeResults.detection.box.bottomLeft
          ).draw(this.canvasEl);
        }
      } else {
        this.canvasEl
            .getContext("2d")
            .clearRect(0, 0, this.canvasEl.width, this.canvasEl.height);
      }
      this.timeout = setTimeout(() => this.fnRunFaceAgeAndGender());
    },
    // 执行检测识别类型
    fnRun() {
      if (this.detection === "landmark") {
        this.fnRunFaceLandmark();
        return;
      }
      if (this.detection === "expression") {
        this.fnRunFaceExpression();
        return;
      }
      if (this.detection === "age_gender") {
        this.fnRunFaceAgeAndGender();
        return;
      }
    },
    // 启动摄像头视频媒体
    fnOpen() {
      ElMessage({
        message: "正在打开摄像头",
        type:'success'
      })
      this.OpenFaceExpression=1
      if (typeof window.stream === "object") return;
      clearTimeout(this.timeout);
      this.timeout = setTimeout(() => {
        clearTimeout(this.timeout);
        navigator.mediaDevices
            .getUserMedia(this.constraints)
            .then(this.fnSuccess)
            .catch(this.fnError);
      }, 300);
    },
    // Media stream successfully acquired: attach it to the <video> element
    // and start playback (which in turn fires @loadedmetadata -> fnRun).
    fnSuccess(stream) {
      // Expose the stream globally so fnOpen/fnClose (and the browser console)
      // can check and stop it.
      window.stream = stream;
      this.videoEl.srcObject = stream;
      this.videoEl.play();
    },
    // Media stream acquisition failed (permission denied, no device, ...):
    // log the error and notify the user. The alert text is user-facing
    // runtime output and is intentionally left as-is.
    fnError(error) {
      console.log(error);
      alert("视频媒体流获取错误" + error);
    },
    // 结束摄像头视频媒体
    fnClose() {
      this.canvasEl
          .getContext("2d")
          .clearRect(0, 0, this.canvasEl.width, this.canvasEl.height);
      this.videoEl.pause();
      clearTimeout(this.timeout);
      if (typeof window.stream === "object") {
        window.stream.getTracks().forEach((track) => track.stop());
        window.stream = "";
        this.videoEl.srcObject = null;
      }
    },
  },
  watch: {
    nets(val) {
      this.nets = val;
      this.fnInit();
    },
    detection(val) {
      this.detection = val;
      this.videoEl.pause();
      setTimeout(() => {
        this.videoEl.play();
        setTimeout(() => this.fnRun(), 300);
      }, 300);
    },
  },
  mounted() {
    // Defer model loading until the DOM (video/canvas nodes) has rendered,
    // since fnInit caches those elements by id.
    this.$nextTick(() => {
      this.fnInit();
    });
  },
  beforeDestroy() {
    this.fnClose();
  },
}
</script>

<style scoped lang="less">
</style>
 