<template>
  <!-- Liveness-detection page: camera preview on the left, results and
       single-action detection controls on the right. -->
  <div id="livedetection">
    <h1>活体检测</h1>
    <div class="box">
      <div class="left">
        <!-- Live camera preview; srcObject is attached in openCamera(). -->
        <div class="video">
          <video ref="video" width="100%" height="400" autoplay></video>
        </div>
        <div class="btns">
          <el-button type="warning" @click="openCamera">打开摄像头</el-button>
        </div>
      </div>
      <div class="right">
        <!-- Overall liveness result pushed by the server over socket.io. -->
        <div class="detail">
          <h2>活体检测结果</h2>
          <div class="result">
            <p>活体检测结果: {{ result }}</p>
            <p>活体检测分数: {{ score }}</p>
          </div>
        </div>
        <!-- Single-action detection: pick one action, then send a frame. -->
        <div class="text">
          <h4>单项检测</h4>
          <div class="item">
            <el-radio-group v-model="radio1">
              <el-radio-button label="眨眼检测"></el-radio-button>
              <el-radio-button label="张嘴检测"></el-radio-button>
              <el-radio-button label="摇头检测"></el-radio-button>
              <el-radio-button label="点头检测"></el-radio-button>
            </el-radio-group>
          </div>
          <div class="resultd">
            <h5>单项检测结果: {{ onetext }}</h5>
            <el-button type="primary" @click="handleSingleDetection">开始检测</el-button>
          </div>
        </div>
      </div>
    </div>
  </div>
</template>

<script>
import io from 'socket.io-client';
import { recordOperation } from '@/api/logService'; // 引入日志记录模块

export default {
  name: "LiveDetection",
  data() {
    return {
      result: "",        // overall liveness result pushed by the server
      score: "",         // liveness score; empty string until a result arrives
      radio1: "眨眼检测", // currently selected single-action detection
      onetext: "",       // result text shown for the single-action detection
      socket: null,      // socket.io connection, created in startSocket()
      stream: null,      // MediaStream from getUserMedia, kept so it can be stopped
      videoFile: null,
      useCamera: false   // true once the camera has been opened successfully
    };
  },
  // Release the camera and the socket when the component is torn down;
  // without this the webcam light stays on and the connection leaks.
  beforeDestroy() {
    if (this.stream) {
      this.stream.getTracks().forEach((track) => track.stop());
      this.stream = null;
    }
    if (this.socket) {
      this.socket.disconnect();
      this.socket = null;
    }
  },
  methods: {
    /**
     * Ask the browser for camera access, attach the stream to the <video>
     * element and open the detection socket. Alerts the user on failure
     * or on browsers without getUserMedia support.
     */
    openCamera() {
      if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        console.log("Browser supports getUserMedia.");
        navigator.mediaDevices.getUserMedia({ video: true })
          .then((stream) => {
            this.stream = stream; // keep a handle so beforeDestroy can stop it
            this.$refs.video.srcObject = stream;
            this.$refs.video.play();
            this.useCamera = true;
            this.startSocket(stream);
            recordOperation('User opened camera');
          })
          .catch((error) => {
            console.error("Error opening camera:", error);
            alert("无法访问摄像头：" + error.message);
            recordOperation(`User failed to open camera: ${error.message}`);
          });
      } else {
        console.error("navigator.mediaDevices:", navigator.mediaDevices);
        console.error("navigator.mediaDevices.getUserMedia:", navigator.mediaDevices ? navigator.mediaDevices.getUserMedia : "undefined");
        alert("您的浏览器不支持摄像头访问，请使用支持的浏览器。");
        recordOperation('User attempted to open camera with unsupported browser');
      }
    },
    /**
     * Connect to the detection server and install the frame-sending helper.
     * Incoming 'detection-result' events update result/score/onetext.
     *
     * @param {MediaStream} stream - camera stream returned by getUserMedia.
     */
    startSocket(stream) {
      // NOTE(review): hard-coded LAN address over plain http — move to
      // configuration and use https/wss for production deployments.
      this.socket = io('http://192.168.33.201:3001');

      const videoTrack = stream.getVideoTracks()[0];
      // ImageCapture is non-standard (Chromium-only); fall back to drawing
      // the <video> element itself, which canvas drawImage also accepts.
      const imageCapture = typeof ImageCapture !== 'undefined'
        ? new ImageCapture(videoTrack)
        : null;

      // Resolve to something drawImage-compatible: an ImageBitmap when
      // ImageCapture is available, otherwise the playing <video> element.
      const grabFrame = () => {
        if (imageCapture) {
          return imageCapture.grabFrame();
        }
        return Promise.resolve(this.$refs.video);
      };

      const sendFrame = (action) => {
        grabFrame()
          .then((frame) => {
            const canvas = document.createElement('canvas');
            // ImageBitmap exposes width/height; a <video> element exposes
            // videoWidth/videoHeight instead.
            canvas.width = frame.width || frame.videoWidth;
            canvas.height = frame.height || frame.videoHeight;
            const ctx = canvas.getContext('2d');
            ctx.drawImage(frame, 0, 0);
            canvas.toBlob((blob) => {
              this.socket.emit('frame', { action, blob });
              recordOperation(`User sent frame for ${action}`);
            }, 'image/jpeg');
          })
          .catch((error) => console.error('Error grabbing frame:', error));
      };

      this.socket.on('connect', () => {
        console.log('Connected to server');
        recordOperation('User connected to detection server');
      });

      this.socket.on('detection-result', (data) => {
        console.log("Detection result:", data);
        this.result = data.result;
        this.score = data.score;
        this.onetext = data.result; // single-action result mirrors the overall one
        recordOperation(`User received detection result: ${data.result}, score: ${data.score}`);
      });

      this.socket.on('disconnect', () => {
        console.log('Disconnected from server');
        recordOperation('User disconnected from detection server');
      });

      this.sendFrame = sendFrame; // expose to handleSingleDetection
    },
    /**
     * Clear previous results and send one frame tagged with the selected
     * single-action detection. Requires the camera to be open first.
     */
    handleSingleDetection() {
      // Reset to "" (not 0) so the UI shows blank fields, matching the
      // initial state declared in data().
      this.result = "";
      this.score = "";
      this.onetext = "";

      // Every branch of the original switch assigned action = this.radio1,
      // so a whitelist check expresses the same validation directly.
      const validActions = ['眨眼检测', '张嘴检测', '摇头检测', '点头检测'];
      if (!validActions.includes(this.radio1)) {
        return;
      }
      const action = this.radio1;

      // sendFrame only exists after startSocket() ran, i.e. after the
      // camera was opened, so guard both.
      if (this.useCamera && typeof this.sendFrame === 'function') {
        this.sendFrame(action);
        recordOperation(`User initiated single detection: ${action}`);
      } else {
        alert("请先打开摄像头");
        recordOperation('User attempted single detection without opening camera');
      }
    },
  },
};
</script>

<!-- scoped: bare selectors (h1, h2/h4/h5, .box, .result, …) would otherwise
     leak into every other component in the application. -->
<style scoped>
/* Page container */
#livedetection {
  border: 1px solid #ccc;
}
h1 {
  margin-bottom: 20px;
}
/* Two-column layout: camera preview (left) and results (right). */
.box {
  display: flex;
  justify-content: space-between;
  margin-top: 20px;
  border: 1px solid #ccc;
}
.left, .right {
  width: 45%;
  padding: 20px;
}
/* Black backdrop shown until the camera stream starts. */
.video {
  width: 100%;
  height: 400px;
  background-color: #000;
}
.btns {
  margin-top: 20px;
}
.detail, .text, .item, .result {
  margin-top: 20px;
}
h2, h4, h5 {
  margin-bottom: 10px;
}
</style>
