<template>
  <!-- Face-photo enrollment page: shows a live, mirrored camera preview inside a
       circular mask, enables the shutter once a face has been detected for several
       consecutive frames, then uploads the captured frame. -->
  <view class="typeIn">
    <view class="typeIn_box">
      <!-- Top-right link opening the "shooting requirements" help popup. -->
      <view class="typeIn_box_help">
        <view @click="help">
          <image src="@/static/images/veriFace/help.png" mode=""></image>
          拍摄要求
        </view>
      </view>

      <view class="typeIn_box_face">
        <view class="typeIn_box_face_tit">
          <h4>人脸照片录入</h4>请全脸正对手机，五官露出，保证光线充足
        </view>
        <!-- Circular capture area: shows the captured photo if present; otherwise the
             live <video> plus mask overlay, or a tap-to-start button when idle. -->
        <view class="typeIn_box_face_pho" :style="!cameraShow ? {background: '#fff'} : ''">
          <image v-if="faceImg" :src="faceImg" mode="aspectFill" width="250px" height="250px"></image>
          <template v-else>
            <video v-show="cameraShow" ref="video" id="myVideo" class="typeIn_box_face_pho_video" :style="videoStyle"
                   :show-center-play-btn="false" :controls="false" :enable-progress-gesture="false" muted
                   @play="onPlayVideo"></video>
            <image v-if="!cameraShow" @click="useCamera" src="@/static/images/veriFace/phobtn.png" mode=""></image>
            <image src="@/static/images/veriFace/face-mask-bg.png" mode=""></image>
          </template>
        </view>
        <!-- Front/rear camera switch, visible only while the preview is running. -->
        <image v-if="cameraShow" src="@/static/images/veriFace/inversion.png" class="inversion" @click="cameraSwitch">
        </image>
        <view class="typeIn_box_face_btn">
          <!-- Warning shown while the camera runs but no face has been detected yet. -->
          <view v-if="!canPhoto && cameraShow && waitTimes == 0" class="typeIn_box_face_btn_warning">
            <image src="@/static/images/veriFace/warning.png" style="margin-right: 6rpx;"></image>
            未检测到人脸
          </view>
          <!-- After capture: upload button, visually disabled while a request is in flight. -->
          <view v-if="faceImg" :class="{'typeIn_box_face_btn_confirm': true, 'disabled': uploading}" @click="upload">
            {{ uploading ? '上传中...' : '上传' }}
          </view>
          <!-- Before capture: shutter button, enabled only once a face is detected. -->
          <view v-else :class="{'typeIn_box_face_btn_confirm': true, 'disabled': !canPhoto}" @click="captureFrame">完成
          </view>
        </view>
      </view>
    </view>
    <!-- Bottom-sheet popup with the photo requirements and example images. -->
    <u-popup v-model="helpShow" mode="bottom" border-radius="14" :mask-close-able="true" :closeable="true">
      <view class="help_box">
        <view class="help_box_tit">
          拍摄要求
        </view>
        <view class="help_box_tits">
          人脸照片拍摄要求
        </view>
        <view class="help_box_txt">
          <p>1.人脸正面照片，不要低头、偏头、侧脸</p>
          <p>2.五官全部露出，避免五官不完整或面部遮挡</p>
          <p>3.人脸大小占整张照片1/3以上，拍摄大头照,不要拍摄全身或半身照</p>
          <p>4.戴眼镜的同学，需要照片中眼镜不反光</p>
          <p>5.照片需清晰，保证较高分辨率</p>
          <p>6.面部光照强度适中，避免面部太暗或明暗不均匀</p>
          <p>7.表情正常，避免夸张表情</p>
          <p>8.照片中仅可有一个人的脸，不可出现多人</p>
          <p>9.背景尽量干净无干扰，纯色最佳</p>
          <p>10.请拍摄或使用近期人脸照片</p>
          <p>11.使用拍摄原图，不要进行美颜或修图哦</p>
          <view>
            <image class="help_box_txt_one" src="@/static/images/veriFace/help_1.png" mode=""></image>
          </view>
        </view>
        <view class="help_box_tits help_box_tits_error">
          错误示例
        </view>
        <view class="help_box_txt">
          <image class="help_box_txt_two" src="@/static/images/veriFace/help_2.png" mode=""></image>
        </view>
      </view>
    </u-popup>
  </view>
</template>

<script>
import {
  API_URL
} from '@/env'
import * as faceapi from 'face-api.js'

export default {
  data() {
    // Reactive state for the camera preview, face-detection gate and upload flow.
    // NOTE(review): several fields (hasAlbum, initScale, original, device, videoShow,
    // pictureShow, picture, canvas, video, image, timeout, options, noOne,
    // moreThanOne, httpsAlert) are not referenced anywhere in this file — they look
    // like leftovers from an earlier implementation; verify before removing.
    return {
      helpShow: false,            // "拍摄要求" help popup visibility
      hasAlbum: true,
      initScale: 1.277,
      original: true,
      device: 'front',
      videoShow: false,
      pictureShow: false,
      // picture data URL
      picture: '',
      // DOM nodes used for video face recognition
      canvas: null,
      video: null,
      image: null,
      timeout: 0,
      // detection options for the model
      options: '',
      // hint/message controls
      noOne: '',
      moreThanOne: '',
      // warning shown when the page is not accessed over HTTPS
      httpsAlert: '',

      videoWidth: 0,              // stream width requested from getUserMedia (long screen edge)
      videoHeight: 0,             // stream height requested from getUserMedia (short screen edge)
      videoStyle: {               // inline style sizing the <video> inside the 250px circle
        width: '250px',
        height: '250px'
      },
      waitTimes: 0,               // consecutive detection passes with a face present
      tracks: null,               // MediaStreamTracks of the active camera stream
      cameraShow: false,          // true while the camera preview is running
      canPhoto: false,            // shutter enabled (face seen long enough)
      faceImg: '',                // captured photo as a PNG data URL
      facingMode: 'user',         // 'user' = front camera, 'environment' = rear
      uploading: false,           // upload in flight (blocks double submit)
      token: null,                // auth token passed via page query (?t=...)
      tinyFaceDetectorLoad: false // face-api.js weights finished loading
    }
  },
  mounted() {
    const systemInfo = uni.getSystemInfoSync()
    let width = Math.min(systemInfo.windowWidth, systemInfo.windowHeight)
    let height = Math.max(systemInfo.windowWidth, systemInfo.windowHeight)
    this.videoWidth = height
    this.videoHeight = width
    let zoomRatio = 250 / width
    width = 250
    height = height * zoomRatio
    this.videoStyle.height = height + 'px'
    this.videoStyle.width = width + 'px'
    this.videoStyle.top = -(height - 250) / 2 + 'px'
    //this.videoStyle.left = -(width - 250) / 2 + 'px'
  },
  onLoad(option) {
    // Accepts an auth token via the page query (?t=...) — used when this page is
    // opened from the WeChat mini-program webview — and persists it for API calls.
    if (option.t) {
      this.token = option.t
      uni.setStorageSync('token', this.token)
    }
    // Preload the face-api.js tiny face detector weights; useCamera() polls this flag.
    faceapi.nets.tinyFaceDetector.load('/static/weights').then(e => {
      this.tinyFaceDetectorLoad = true
    })
  },
  computed: {
    scale: function () {
      if (this.initScale > 1.77) return 1.77;
      if (this.initScale < 1.27) return 1.27;
      return this.initScale;
    }
  },
  methods: {
    cameraSwitch() {
      this.facingMode = this.facingMode == 'environment' ? 'user' : 'environment'
      this.useCamera()
    },
    async useCamera() {
      uni.showLoading({
        title: 'loading...'
      })
      let constraints = {
        audio: false,
        video: {
          facingMode: this.facingMode,
          width: {
            ideal: this.videoWidth
          },
          height: {
            ideal: this.videoHeight
          }
        }
      }
      console.log(0, 'init')
      await new Promise((resolve, reject) => {
        const interval = setInterval(() => {
          if (this.tinyFaceDetectorLoad) {
            clearInterval(interval)
            return resolve()
          }
        }, 10)
      })
      console.log(1, 'loadTinyFaceDetector')
      //await faceapi.nets.tinyFaceDetector.load('/static/weights')
      try {
        if (navigator.mediaDevices === undefined) {
          navigator.mediaDevices = {};
        }
        console.log(2, 'navigator.mediaDevices', navigator.mediaDevices)
        if (navigator.mediaDevices.getUserMedia === undefined) {
          navigator.mediaDevices.getUserMedia = function (constraints) {
            var getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator
                .mozGetUserMedia || navigator.msGetUserMedia || navigator.oGetUserMedia;
            if (!getUserMedia) {
              return Promise.reject(new Error('getUserMedia is not implemented in this browser'));
            }
            console.log(3, 'getUserMedia', getUserMedia)
            return new Promise(function (resolve, reject) {
              getUserMedia.call(navigator, constraints, resolve, reject);
            });
          }
        }
        navigator.mediaDevices.getUserMedia(constraints).then((stream) => {
          console.log(4, 'getUserMedia.stream', stream)
          this.tracks = stream.getTracks()
          console.log(5, 'getUserMedia.stream.getTracks', this.tracks)
          const video = document.querySelector('video')
          console.log(6, 'getUserMedia.video', video, 'srcObject' in video)
          if ('srcObject' in video) {
            video.srcObject = stream
          } else {
            video.src = window.URL.createObjectURL(stream)
          }
          video.onloadedmetadata = e => {
            console.log(7, 'video.onloadedmetadata', e)
            this.cameraShow = true
            uni.hideLoading()
            video.play()
          }
        }).catch((error) => {
          console.error('navigator.mediaDevices.getUserMedia', error)
        })
      } catch (err) {
        console.error('navigator.mediaDevices', err)
      }
    },
    async closeCamera() {
      this.tracks.forEach(await
          function (track) {
            track.stop()
          })
      const video = document.querySelector('video')
      if (video) {
        video.srcObject = null
      }
    },
    async onPlayVideo() {
      // Face-detection loop, started by the <video> @play event. Re-schedules
      // itself with setTimeout until the video element disappears from the DOM
      // (photo captured or page left).
      const videoEl = document.querySelector('video')
      if (!videoEl) {
        return
      }
      // Defer until playback has actually started and the model weights are ready.
      if (videoEl.paused || videoEl.ended || !this.isFaceDetectionModelLoaded()) {
        return setTimeout(() => this.onPlayVideo())
      }
      const options = new faceapi.TinyFaceDetectorOptions({
        inputSize: 512,
        scoreThreshold: 0.5
      })
      const result = await faceapi.detectSingleFace(videoEl, options)
      if (result) {
        // Require a face in more than 5 consecutive passes before enabling the
        // shutter, so a momentary false positive cannot unlock it.
        if (this.waitTimes++ > 5) {
          this.canPhoto = true
        }
      } else {
        // Face lost: reset the streak and disable the shutter again.
        this.waitTimes = 0
        this.canPhoto = false
      }
      setTimeout(() => this.onPlayVideo())
    },
    getCurrentFaceDetectionNet() {
      // The face-api.js network in use; kept behind a getter so the model could
      // be swapped in one place.
      return faceapi.nets.tinyFaceDetector
    },
    isFaceDetectionModelLoaded() {
      return !!this.getCurrentFaceDetectionNet().params
    },
    captureFrame() {
      if (!this.canPhoto) {
        return
      }
      const video = document.querySelector('video')
      const canvas = document.createElement('canvas')
      canvas.width = video.videoWidth
      canvas.height = video.videoHeight
      const ctx = canvas.getContext('2d')
      ctx.translate(canvas.width, 0)
      ctx.scale(-1, 1)
      ctx.drawImage(video, 0, 0, canvas.width, canvas.height)
      const img = canvas.toDataURL('image/png')
      this.cameraShow = false
      this.closeCamera()
      this.faceImg = img
    },
    dataURLtoBlob(dataurl) {
      let arr = dataurl.split(','),
          mime = arr[0].match(/:(.*?);/)[1],
          bstr = atob(arr[1]),
          n = bstr.length,
          u8arr = new Uint8Array(n);
      while (n--) {
        u8arr[n] = bstr.charCodeAt(n);
      }
      return new Blob([u8arr], {
        type: mime
      });
    },
    close() {
      // Ask the parent component to dismiss this panel.
      this.$emit("close")
    },
    help() {
      // Open the "拍摄要求" (shooting requirements) bottom-sheet popup.
      this.helpShow = true
    },
    upload() {
      if (this.uploading) {
        return
      }
      this.uploading = true
      const file = this.dataURLtoBlob(this.faceImg)
      uni.uploadFile({
        url: API_URL + '/upload/aliUploadImage',
        file: file,
        name: 'file',
        header: {
          Authorization: 'Bearer' + ' ' + uni.getStorageSync('token')
        },
        success: uploadFileRes => {
          const res = JSON.parse(uploadFileRes.data)
          this.$http(
              'veriFace.save', {
                avatar: res.data
              }
          ).then(res => {
            if (res.code == 0) {
              uni.showToast({
                title: '照片录入成功！',
                icon: 'success',
                duration: 1000,
                mask: true
              })
              setTimeout(() => {
                if (this.token) {
                  const script = document.createElement('script')
                  script.src = 'https://res.wx.qq.com/open/js/jweixin-1.3.2.js'
                  script.async = true
                  script.onload = () => {
                    wx.miniProgram.navigateBack()
                  }
                  document.head.appendChild(script)
                  uni.removeStorageSync('token')
                } else {
                  uni.navigateBack()
                }
              }, 800)
            } else {
              this.uploading = false
              this.$u.toast(res.msg + ':' + res.data.message)
            }
          }).catch(e => {
            this.uploading = false
            this.$u.toast('接口请求错误！')
          })
        },
        fail: () => {
          this.uploading = false
          this.$u.toast('接口请求错误！')
        }
      })
    }
  }
}
</script>

<style lang="scss" scoped>
// Hide WebKit's video scrub bar so the preview shows no timeline controls.
video::-webkit-media-controls-timeline {
  display: none !important;
}

.typeIn {
  .retry {
    margin-left: 4px;
    text-decoration: underline;
    color: #1989F9;
  }

  // Camera-flip icon, nudged over the bottom-right edge of the preview circle.
  .inversion {
    width: 20px;
    height: 18px;
    position: relative;
    top: -73px;
    right: -150px;
  }

  &_box {
    &_help {
      display: flex;
      align-items: center;
      justify-content: end;
      padding: 40rpx;

      view {
        display: flex;
        align-items: center;
      }

      image {
        width: 30rpx;
        height: 30rpx;
        margin-right: 10rpx;
      }
    }

    &_face {
      padding: 40rpx;
      box-sizing: border-box;
      text-align: center;

      &_tit {
        color: #8E8E8E;
        font-size: 30rpx;

        h4 {
          color: #1989F9;
          margin-bottom: 20rpx;
          font-size: 40rpx;
        }
      }

      // Circular viewport: overflow hidden + 50% radius clips the oversized video.
      &_pho {
        width: 500rpx;
        height: 500rpx;
        margin: 100rpx auto;
        overflow: hidden;
        background: #8E8E8E;
        position: relative;
        border-radius: 50%;

        &_video {
          position: absolute;
          // Mirror the preview like a selfie camera; captureFrame() mirrors the
          // canvas the same way so the saved photo matches.
          transform: scaleX(-1);
          //object-fit: cover;
          z-index: 0;
        }

        image {
          width: 500rpx;
          height: 500rpx;
          z-index: 9999;
        }
      }

      &_btn {
        left: 40rpx;
        right: 40rpx;
        display: flex;
        flex-wrap: wrap;
        flex-direction: column;
        justify-content: flex-end;
        height: 200rpx;
        margin-top: -32px;

        image {
          width: 30rpx;
          height: 30rpx;
        }

        view {
          padding: 24rpx;
          border-radius: 20rpx;
        }

        .disabled {
          opacity: .5;
        }

        &_warning {
          background: #FDEBE1;
          color: #F57648;
          border: 1px solid #F57648;
          margin-bottom: 20rpx;
          display: flex;
          justify-content: center;
          align-items: center;
        }

        &_confirm {
          background: #1989F9;
          color: #fff;
        }
      }
    }

  }

  // Bottom-sheet popup content (shooting requirements).
  .help_box {
    height: 90vh;
    overflow-y: scroll;
    padding: 80rpx 30rpx;
    position: relative;
    background: #f2f6ff;

    // Title pinned to the top of the sheet while the body scrolls underneath.
    &_tit {
      text-align: center;
      font-weight: bold;
      margin-bottom: 30rpx;
      position: fixed;
      left: 0;
      right: 0;
      top: 0;
      padding: 30rpx;
      background: #f2f6ff;
      z-index: 1;
    }

    &_tits {
      margin: 20rpx 0;
      background-image: url(@/static/images/veriFace/info_back.png);
      background-size: cover;
      color: #fff;
      display: inline-block;
      padding: 10rpx 30rpx;
      min-width: 200rpx;

      &_error {
        background-image: url(@/static/images/veriFace/error_back.png);
      }
    }

    &_txt {
      p {
        line-height: 40rpx;
      }

      view {
        text-align: center;
      }

      &_one {
        width: 300rpx;
        height: 440rpx;
        margin: 50rpx auto;
      }

      &_two {
        width: 100%;
        height: 1200rpx;
      }
    }
  }
}
</style>