<template>
  <div v-loading="loadingModel" style="display: flex; flex-direction: column; align-items: center;">
    <div class="video-top-bar" :style="{ width: width + 'px' }">
      <el-button :icon="btnIcon" circle :disabled="btnDisabled" class="play-pause-btn" @click="switchPlay" />
    </div>
    <div style="position: relative" :style="{ width: width + 'px' }">
      <canvas ref="canvas" :width="width" :height="height" />
      <canvas ref="screenShotCanvas" style="display: none" />
      <video ref="video" :width="width" :height="height" class="hidden-video" />
    </div>
  </div>
</template>

<script>
import * as blazeface from './blazeface.js'

export default {
  name: 'FaceDetector',
  props: {
    width: {
      type: String,
      required: true
    },
    height: {
      type: String,
      required: true
    }
  },
  data() {
    return {
      // DOM element refs, cached from $refs in mounted()
      video: null,
      canvas: null,
      screenShotCanvas: null,
      // Native resolution of the camera stream (set in setUpPage)
      videoWidth: 0,
      videoHeight: 0,
      // 2D context of the visible canvas (mirrored once in mounted())
      ctx: null,
      // Lazily-loaded blazeface model, cached across play/pause cycles
      model: null,
      // True while the preview/detection loop should keep running
      playing: false,
      // Element-UI icon class for the play/pause toggle button
      btnIcon: 'el-icon-video-play',
      // Drives the v-loading overlay while camera/model start up
      loadingModel: false,
      // Disables the toggle button during startup
      btnDisabled: false
    }
  },
  mounted() {
    this.video = this.$refs['video']
    this.canvas = this.$refs['canvas']
    this.screenShotCanvas = this.$refs['screenShotCanvas']
    this.ctx = this.canvas.getContext('2d')
    this.ctx.translate(this.canvas.width, 0)
    this.ctx.scale(-1, 1)
    this.setupCanvas()
  },
  beforeDestroy() {
    // Release the camera when the component is removed from the DOM.
    this.tearDownCamera()
  },
  methods: {
    switchPlay() {
      if (!this.playing) {
        this.playing = true
        this.setUpPage()
        this.btnIcon = 'el-icon-video-pause'
      } else {
        this.playing = false
        this.tearDownCamera()
        this.btnIcon = 'el-icon-video-play'
      }
    },
    setupCanvas() {
      // Paint the whole canvas with the idle background colour.
      // (The context is mirrored in mounted(); a rect spanning the full
      // width still covers the entire canvas under that transform.)
      this.ctx.fillStyle = '#2eb5e5'
      this.ctx.fillRect(0, 0, this.canvas.width, this.canvas.height)
    },
    async setUpPage() {
      this.loadingModel = true
      this.btnDisabled = true
      await this.setupCamera()
      this.video.play()
      this.videoWidth = this.video.videoWidth
      this.videoHeight = this.video.videoHeight
      // this.canvas.width = this.videoWidth
      // this.canvas.height = this.videoHeight
      if (!this.model) {
        this.model = await blazeface.load()
      }
      this.loadingModel = false
      this.btnDisabled = false
      this.renderPrediction()
    },
    async setupCamera() {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: false,
        video: { facingMode: 'user' }
      })
      this.video.srcObject = stream
      return new Promise(resolve => {
        this.video.onloadedmetadata = () => {
          resolve(this.video)
        }
      })
    },
    async renderPrediction() {
      const predictions = await this.model.estimateFaces(this.video)
      this.ctx.drawImage(this.video, 0, 0, this.videoWidth, this.videoHeight)
      if (predictions.length > 0) {
        for (let i = 0; i < predictions.length; i++) {
          // bounding box
          const start = predictions[i].topLeft
          const end = predictions[i].bottomRight
          const size = [end[0] - start[0], end[1] - start[1]]
          this.ctx.beginPath()
          this.ctx.lineWidth = '4'
          this.ctx.strokeStyle = 'rgba(255, 0, 0, 0.5)'
          this.ctx.rect(start[0], start[1], size[0], size[1])
          this.ctx.stroke()
          // landmarks
          const landmarks = predictions[i].landmarks
          this.ctx.fillStyle = 'blue'
          for (let j = 0; j < landmarks.length; j++) {
            const x = landmarks[j][0]
            const y = landmarks[j][1]
            this.ctx.fillRect(x, y, 5, 5)
          }
        }
      }
      if (!this.playing) {
        return
      }
      requestAnimationFrame(this.renderPrediction)
    },
    tearDownCamera() {
      const stream = this.video.srcObject
      if (stream) {
        const tracks = stream.getTracks()
        tracks.forEach(track => {
          track.stop()
        })
        this.setupCanvas()
      }
    },
    async takeScreenShot() {
      if (!this.model) {
        this.playing = true
        await this.setUpPage()
        this.btnIcon = 'el-icon-video-pause'
      }
      this.model.estimateFaces(this.video).then(predictions => {
        if (predictions.length > 0) {
          const start = predictions[0].topLeft
          const end = predictions[0].bottomRight
          const size = [end[0] - start[0], end[1] - start[1]]
          this.screenShotCanvas.width = size[0]
          this.screenShotCanvas.height = size[1]
          const screenShotCtx = this.screenShotCanvas.getContext('2d')
          screenShotCtx.translate(this.screenShotCanvas.width, 0)
          screenShotCtx.scale(-1, 1)
          screenShotCtx.drawImage(this.video, start[0], start[1], size[0], size[1], 0, 0, size[0], size[1])
          this.$emit('screenShot', this.screenShotCanvas.toDataURL('image/webp'))
        } else {
          this.$emit('screenShot', null)
        }
      })
    }
  }
}
</script>

<style lang="scss" scoped>
.mg-auto {
  margin:0 auto;
}
.mg-bottom-20 {
  margin-bottom: 20px;
}
.hidden-video {
  position: absolute;
  top: 0;
  left: 0;
  -webkit-transform: scaleX(-1);
  transform: scaleX(-1);
  visibility: hidden;
}
.video-top-bar {
  display: flex;
  justify-content: center;
  align-items: center;
  background-color: #182652;
  height: 50px;
}
</style>
