<template>
  <div class="face-detection">
    <!-- Live camera preview; tracking.js reads frames from this element. -->
    <video ref="video" width="300" height="225" autoplay></video>
    <!-- Off-screen canvas where detection rectangles are drawn (kept hidden). -->
    <canvas ref="canvas" width="300" height="225" style="display: none;"></canvas>
  </div>
</template>

<script>
import tracking from 'tracking';


export default {
  name: 'FaceDetection',
  data() {
    return {
      tracker: null,          // tracking.ColorTracker instance
      trackerTask: null,      // handle returned by tracking.track(); needed to stop the camera loop
      isDetecting: false,     // guards against starting detection twice
      lastCaptureTime: 0,     // timestamp (ms) of the last emitted capture
      captureInterval: 5000,  // minimum delay between photo captures, in milliseconds
    }
  },
  mounted() {
    try {
      if (!tracking) {
        throw new Error('错误');
      }
      console.log('---------------');
      this.startDetection();
    } catch (error) {
      console.error('异常了:', error);
    }
  },
  methods: {
    /**
     * Registers a skin-tone color matcher, starts tracking the <video>
     * element with the device camera, and draws a rectangle on the hidden
     * canvas for every match. Triggers a throttled photo capture whenever
     * at least one region is detected.
     */
    startDetection() {
      if (this.isDetecting) return;

      const video = this.$refs.video;
      const canvas = this.$refs.canvas;
      const context = canvas.getContext('2d');

      // BUGFIX: use the imported `tracking` module directly — `this.tracking`
      // is undefined on the component instance and threw a TypeError.
      // Matches colors within a Euclidean distance of the reference skin
      // tone (r=95, g=40, b=20); threshold 3500 is squared distance.
      tracking.ColorTracker.registerColor('face', (r, g, b) => {
        const dx = r - 95;
        const dy = g - 40;
        const dz = b - 20;
        return (dx * dx + dy * dy + dz * dz) < 3500;
      });

      this.tracker = new tracking.ColorTracker(['face']);
      // Keep the task handle so the camera/tracking loop can be stopped
      // in beforeUnmount (otherwise the webcam stays on after unmount).
      this.trackerTask = tracking.track(video, this.tracker, { camera: true });

      this.tracker.on('track', (event) => {
        if (!event || !event.data) {
          console.warn('Invalid event data received');
          return;
        }
        context.clearRect(0, 0, canvas.width, canvas.height);
        if (event.data.length > 0) {
          event.data.forEach((rect) => {
            context.strokeStyle = '#a64ceb';
            context.strokeRect(rect.x, rect.y, rect.width, rect.height);
          });
          this.capturePhoto();
        }
      });

      this.isDetecting = true;
      console.log('Face detection started');
    },
    /**
     * Captures the current video frame as a JPEG data URL and emits it via
     * the 'photo-captured' event. Throttled to at most one capture per
     * `captureInterval` milliseconds.
     */
    capturePhoto() {
      const currentTime = Date.now();
      if (currentTime - this.lastCaptureTime < this.captureInterval) {
        return;
      }

      const video = this.$refs.video;
      const canvas = document.createElement('canvas');
      // Use the stream's intrinsic resolution, not the element's CSS size.
      canvas.width = video.videoWidth;
      canvas.height = video.videoHeight;
      canvas.getContext('2d').drawImage(video, 0, 0);
      const capturedImage = canvas.toDataURL('image/jpeg');

      this.$emit('photo-captured', capturedImage);
      this.lastCaptureTime = currentTime;

      console.log('Photo captured');
    },
  },
  beforeUnmount() {
    // Stop the tracking loop first so no further 'track' events fire.
    if (this.trackerTask && typeof this.trackerTask.stop === 'function') {
      this.trackerTask.stop();
    }
    if (this.tracker) {
      this.tracker.removeAllListeners();
    }
    // Release the camera: stop every MediaStream track attached to the video.
    const video = this.$refs.video;
    if (video && video.srcObject) {
      video.srcObject.getTracks().forEach((track) => track.stop());
      video.srcObject = null;
    }
    this.isDetecting = false;
    console.log('Face detection component unmounted');
  },
}
</script>
