<template>
  <div class="face-capture" id="face-capture">
    <div class="text-desc">刷脸认证</div>
    <div>{{ scanTip }}</div>
    <div>
      <button @click="reScan">打开人脸检测</button>
      <button @click="close">关闭人脸检测</button>
    </div>
    <div id="scan-face" class="scan-face">
      <img v-if="!showContainer" :src="imgUrl" />
      <video
        v-if="showContainer"
        ref="refVideo"
        id="video"
        autoplay
        preload
        loop
        muted
        playsinline
        webkit-playsinline
      ></video>
    </div>
    <div class="control-container face-capture">
      <canvas
        ref="refCanvas"
        :width="560"
        :height="560"
      ></canvas>
    </div>
    <div id="face-rect" class="face-rect"></div>
    
  </div>
</template>
<script>
// 引入trackingjs所需文件
import tracking from "@/assets/js/tracking-min.js";
import "@/assets/js/face-min.js";

export default {
  name: "FaceTracking",
  data() {
    return {
      screenSize: { width: window.screen.width, height: window.screen.height },
      URL: null,
      streamIns: null, // 视频流
      showContainer: true, // 显示
      tracker: null,
      tipFlag: false, // 提示用户已经检测到
      flag: false, // 判断是否已经拍照
      context: null, // canvas上下文
      profile: [], // 轮廓
      removePhotoID: null, // 停止转换图片
      scanTip: "人脸识别已关闭", // 提示文字
      imgUrl: "", // base64格式图片
    };
  },
  mounted() {
    // Camera access is intentionally NOT started automatically; the user
    // opens it with the "打开人脸检测" button, which calls reScan().
    // this.playVideo();
  },
  methods: {
    test(){
      console.log("test");
    },
    /**
     * 重新扫描
     * */
    reScan() {
      this.playVideo();
      this.imgUrl = "";
      this.showContainer = true;
    },
    /**
     * 视频播放 - 默认调起前置摄像头
     * */
    playVideo() {
      console.log("开启摄像头");
      this.getUserMedia(
        {
          video: {
            width: 560,
            height: 560,
            facingMode: "user" /* 摄像头前置优先 */,
          },
        },
        this.success,
        this.error
      );
    },
    /**
     * Request access to the user's camera, falling back through the
     * historical getUserMedia variants.
     *
     * The modern navigator.mediaDevices.getUserMedia API is Promise-based;
     * the legacy prefixed/unprefixed navigator.*getUserMedia APIs are
     * callback-based and must be invoked as (constraints, success, error) —
     * they do not return a Promise, so .then()/.catch() on them would throw.
     *
     * Fixes: "navagator" typo on the Firefox path (ReferenceError);
     * guard against navigator.mediaDevices being undefined (insecure origins).
     *
     * @param constrains MediaStreamConstraints for the request
     * @param success    callback receiving the MediaStream
     * @param error      callback receiving the failure
     */
    getUserMedia(constrains, success, error) {
      if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        // Modern standard API (Promise-based).
        navigator.mediaDevices
          .getUserMedia(constrains)
          .then(success)
          .catch(error);
      } else if (navigator.webkitGetUserMedia) {
        // Old WebKit browsers (callback-based).
        navigator.webkitGetUserMedia(constrains, success, error);
      } else if (navigator.mozGetUserMedia) {
        // Old Firefox (callback-based). Typo fixed: was "navagator".
        navigator.mozGetUserMedia(constrains, success, error);
      } else if (navigator.getUserMedia) {
        // Unprefixed legacy API (callback-based).
        navigator.getUserMedia(constrains, success, error);
      } else {
        this.scanTip = "你的浏览器不支持访问用户媒体设备";
      }
    },
    success(stream) {
      this.streamIns = stream;
      // webkit内核浏览器
      this.URL = window.URL || window.webkitURL;
      if ("srcObject" in this.$refs.refVideo) {
        this.$refs.refVideo.srcObject = stream;
      } else {
        this.$refs.refVideo.src = this.URL.createObjectURL(stream);
      }
      this.$refs.refVideo.onloadedmetadata = (e) => {
        this.$refs.refVideo.play();
        this.initTracker();
      };
    },
    /**
     * getUserMedia failure handler: show a generic status message.
     * @param e the DOMException from getUserMedia (currently unused)
     */
    error(e) {
      // Detailed variant kept for debugging:
      // this.scanTip = "访问用户媒体失败" + e.name + "," + e.message
      this.scanTip = "访问用户媒体失败";
    },
    /**
     * 人脸捕捉
     */
    initTracker() {
      this.context = this.$refs.refCanvas.getContext("2d"); // 画布
      this.tracker = new window.tracking.ObjectTracker("face"); // tracker实例
      this.tracker.setInitialScale(4);
      this.tracker.setEdgesDensity(0.1);
      this.tracker.setStepSize(1.1);
      this.tracker.on("track", this.trackerTask); // 绑定监听方法 handleTracked
      try {
        window.tracking.track("#video", this.tracker); // 开始追踪
      } catch (e) {
        // this.scanTip = "访问用户媒体失败，请重试"
        this.scanTip = e;
      }
    },
    /**
     * 追踪事件2 【脸部追踪】
     * 将脸部使用方框标记出来
     *
     */
    trackerTask(e) {
      let el = document.getElementById("scan-face");
      let div = document.getElementById("face-rect");

      if (e.data.length === 0) {
        this.scanTip = "未检测到人脸";
        div.style.opacity = "0";
      } else {
        if (!this.tipFlag) {
          this.scanTip = "检测成功，正在拍照，请保持不动2秒";
          console.log(this.scanTip);
          //追踪检测过程中，不停的重绘面部标识框
          div.style.opacity = "1";
          e.data.forEach(function (rect) {
            div.style.border = "2px solid " + (rect.color || "#0aeb08");
            div.style.width = rect.width + "px";
            div.style.height = rect.height + "px";
            div.style.left = el.offsetLeft + rect.x + "px";
            div.style.top = el.offsetTop + rect.y + "px";
          });
        }
      }
    },
    /**
     * 追踪事件1 - 【脸部追踪】
     * */
    handleTracked(e) {
      //   console.log("检测人脸："+e.data);
      if (e.data.length === 0) {
        this.scanTip = "未检测到人脸";
      } else {
        if (!this.tipFlag) {
          this.scanTip = "检测成功，正在拍照，请保持不动2秒";
          console.log(this.scanTip);
        }
        // 1秒后拍照，仅拍一次
        if (!this.flag) {
          // this.scanTip = '拍照中...'
          this.flag = true;
          this.removePhotoID = setTimeout(() => {
            this.tackPhoto();
            this.tipFlag = true;
            console.log("拍照成功！");
            this.close();
          }, 2000);
        }
        e.data.forEach(this.plot);
      }
    },
    /**
     * 绘制跟踪框
     * */
    plot({ x, y, width: w, height: h }) {
      // 创建框对象
      this.profile.push({ width: w, height: h, left: x, top: y });
    },
    /**
     * 识别成功后拍照
     */
    tackPhoto() {
      this.context.drawImage(this.$refs.refVideo, 0, 0, 560, 560);
      // 保存为base64格式
      this.imgUrl = this.saveAsPNG(this.$refs.refCanvas);
      /** 拿到base64格式图片之后就可以在this.compare方法中去调用后端接口比较了，也可以调用getBlobBydataURI方法转化成文件再去比较
       * 我们项目里有一个设置个人头像的地方，先保存一下用户的图片，然后去拿这个图片的地址和当前拍照图片给后端接口去比较。
       * */
      // this.compare(imgUrl)
      this.scanTip = this.imgUrl;
      this.$emit("finish", this.imgUrl);
      this.close();
    },
    /**
     * Serialize a canvas element to a base64 PNG data URL.
     * NOTE(review): the second argument (0.3) is ignored for
     * "image/png" — per the HTML spec the quality parameter only
     * applies to lossy formats such as image/jpeg or image/webp.
     * @param c the canvas DOM element
     * @return data URL string ("data:image/png;base64,...")
     */
    saveAsPNG(c) {
      return c.toDataURL("image/png", 0.3);
    },
    /**
     * 关闭并清理资源
     */
    close() {
      this.flag = false;
      this.tipFlag = false;
      this.showContainer = false;
      this.tracker && this.tracker.removeListener("track", this.handleTracked);
      this.tracker = null;
      this.context = null;
      this.profile = [];
      this.scanTip = "人脸识别已关闭";
      clearTimeout(this.removePhotoID);
      if (this.streamIns) {
        this.streamIns.enabled = false;
        this.streamIns.getTracks()[0].stop();
        this.streamIns.getVideoTracks()[0].stop();
      }
      this.streamIns = null;
    },
  },
};
</script>
<style scoped>
.face-capture {
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: center;
}
.scan-face {
  width: 200px;
  height: 200px;
  border-radius: 50%;
  border-top: 0.06rem solid rgba(3, 169, 244, 0.5);
  border-bottom: 0.06rem solid rgba(3, 169, 244, 0.5);
  border-left: 0.04rem solid rgba(3, 169, 244, 0.5);
  border-right: 0.04rem solid rgba(3, 169, 244, 0.5);
  box-shadow: 0 0 0.1rem #c5c4c4;
}

.scan-face img {
  border-radius: 50%;
  width: 100%;
  height: 100%;
}
.scan-face video {
  width: 100%;
  height: 100%;
  -o-object-fit: cover;
  object-fit: cover;
  background-repeat: no-repeat;
  background-size: 100% 100%;
  background-color: rgba(117, 128, 0, 0.04);
  border-radius: 50%;
  z-index: 2;
}

#canvas-face {
  border: 2px solid #0aeb08;
}

.face-capture .rect {
  border: 2px solid #0aeb08;
  position: fixed;
  z-index: 3;
}

.text-desc {
  font-size: 18px;
  padding: 0.3rem 0 0.5rem;
}


.face-capture canvas {
  opacity: 0;
}

.face-rect {
  position: absolute;
  width: 50px;
  height: 50px;
  border: 2px solid #0aeb08;
  opacity: 0;
}
</style>