<template>
  <div class="gesture-recognition">
    <h2>MediaPipe Gesture Recognizer</h2>

    <!-- 视频显示区域 -->
    <div class="video-container">
      <video ref="videoElement" width="640" height="480" autoplay muted playsinline class="video"></video>
      <canvas ref="canvasElement" width="640" height="480" class="canvas-overlay"></canvas>
    </div>

    <!-- 检测到的手势信息 -->
    <div class="gesture-info">
      <h3>识别到的手势：</h3>
      <div class="gesture-item">
        <span class="hand-label">序号</span>
        <span class="gesture-name">手势</span>
        <span class="handedness-name">惯用手</span>
      </div>
      <div v-for="(gesture, index) in gestures" :key="index" class="gesture-item">
        <span class="hand-label">手 {{ index + 1 }}</span>
        <span class="gesture-name">{{ gesture.categoryName }}</span>
        <span class="handedness-name">{{ handedness[index]?.categoryName }}</span>
      </div>
    </div>

    <!-- 控制按钮 -->
    <div class="controls">
      <button @click="startCamera" :disabled="isRunning || !isModelReady">
        {{ isRunning ? '摄像头运行中...' : '启动摄像头' }}
      </button>
      <button @click="stopCamera" :disabled="!isRunning">
        停止摄像头
      </button>
    </div>
  </div>
</template>

<script setup lang="ts">
import { ref, onMounted } from 'vue'
import {
  DrawingUtils,
  FilesetResolver,
  GestureRecognizer
} from '@mediapipe/tasks-vision'
import type { Category, GestureRecognizerResult } from '@mediapipe/tasks-vision'

// 响应式数据
const videoElement = ref<HTMLVideoElement | null>(null)
const canvasElement = ref<HTMLCanvasElement | null>(null)
const canvasCtx = ref<CanvasRenderingContext2D | null>(null)

const isRunning = ref<boolean>(false)
const isLoading = ref<boolean>(true)
const isModelReady = ref<boolean>(false)
const gestures = ref<GestureRecognizerResult[]>([])
const handedness = ref<GestureRecognizerResult[]>([])


// MediaPipe 相关变量
let gestureRecognizer: GestureRecognizer | null = null
let filesetResolver: FilesetResolver | null = null
let camera: any = null

// 手部关键点绘制工具
let drawingUtils: DrawingUtils | null = null

// 2. 启动摄像头
const startCamera = async () => {
  if (!gestureRecognizer || !videoElement.value) return

  try {
    isRunning.value = true
    isLoading.value = false

    // 获取用户摄像头权限
    const stream = await navigator.mediaDevices.getUserMedia({
      video: { width: 640, height: 480 }
    })
    if (videoElement.value) {
      videoElement.value.srcObject = stream
    }

    // 等待视频加载
    await new Promise<void>((resolve) => {
      videoElement.value?.addEventListener('loadeddata', () => {
        resolve()
      })
    })

    // 初始化绘图工具
    if (canvasElement.value) {
      canvasCtx.value = canvasElement.value.getContext('2d')
      drawingUtils = new DrawingUtils(canvasCtx.value)
    }

    // 开始每帧识别
    recognizeGestures()

    console.log('[手势识别] 🎥 摄像头启动成功，开始识别手势')
  } catch (error) {
    console.error('[手势识别] 摄像头启动失败:', error)
    alert('无法访问摄像头，请检查权限设置')
    isRunning.value = false
  }
}

// 3. 停止摄像头
const stopCamera = () => {
  isRunning.value = false

  if (camera) {
    camera.stop()
  }

  if (videoElement.value && videoElement.value.srcObject) {
    const tracks = (videoElement.value.srcObject as MediaStream).getTracks()
    tracks.forEach((track) => track.stop())
    videoElement.value.srcObject = null
  }

  // 清空画布和结果
  if (canvasElement.value && canvasCtx.value) {
    canvasCtx.value.clearRect(0, 0, 640, 480)
  }

  gestures.value = []
  console.log('[手势识别] 🛑 摄像头已停止')
}

// 4. 每一帧进行手势识别
const recognizeGestures = () => {
  if (
    !isRunning.value ||
    !gestureRecognizer ||
    !videoElement.value ||
    !canvasElement.value
  ) {
    return
  }

  const video = videoElement.value
  const canvas = canvasElement.value

  if (video.readyState >= 2) {
    // 将当前视频帧作为输入，进行手势识别
    const gestureResult = gestureRecognizer.recognizeForVideo(video, performance.now() / 1000)
    // 清空画布
    if (canvasCtx.value) {
      canvasCtx.value.clearRect(0, 0, canvas.width, canvas.height)
    }

    // 更新手势结果
    gestures.value = gestureResult.gestures.map((gesture) => gesture[0]) || []
    handedness.value = gestureResult.handedness.map((handedness) => handedness[0]) || []

    // 绘制手部关键点和连线
    if (gestureResult.handedness && gestureResult.landmarks) {
      for (let i = 0; i < gestureResult.landmarks.length; i++) {
        const landmarks = gestureResult.landmarks[i]
        // 官方推荐使用 DrawingUtils 来绘制
        if (drawingUtils) {
          drawingUtils.drawConnectors(landmarks, GestureRecognizer.HAND_CONNECTIONS, {
            color: '#00FF00',
            lineWidth: 2
          })
          drawingUtils.drawLandmarks(landmarks, {
            color: '#FF0000',
            lineWidth: 1,
            radius: 3
          })
        }
      }
    }
  }

  requestAnimationFrame(recognizeGestures)
}

onMounted(async () => {
  // 1. Load the gesture-recognition model as soon as the component mounts.
  try {
    isLoading.value = true
    console.log('[手势识别] 开始加载模型...')

    // Resolve the vision-task WASM fileset from the local models directory.
    filesetResolver = await FilesetResolver.forVisionTasks('./models')

    // Build the recognizer: local .task model, GPU delegate (falls back
    // handled by MediaPipe; 'CPU' also works), VIDEO running mode because
    // we feed it live camera frames, up to two hands.
    gestureRecognizer = await GestureRecognizer.createFromOptions(filesetResolver, {
      baseOptions: {
        modelAssetPath: './models/gesture_recognizer.task',
        delegate: 'GPU'
      },
      runningMode: 'VIDEO',
      numHands: 2
    })

    isModelReady.value = true
    isLoading.value = false
    console.log('[手势识别] ✅ 模型加载成功！')
  } catch (error) {
    console.error('[手势识别] 模型加载失败:', error)
    alert('模型加载失败，请检查网络或刷新页面重试')
    isLoading.value = false
  }
})
</script>

<style scoped>
/* Page wrapper: centered column, capped width. */
.gesture-recognition {
  padding: 20px;
  font-family: Arial, sans-serif;
  max-width: 800px;
  margin: 0 auto;
}

/* Positioning context for the canvas overlay on top of the video. */
.video-container {
  position: relative;
  display: inline-block;
  margin-bottom: 20px;
}

.video,
.canvas-overlay {
  border: 1px solid #ccc;
  border-radius: 8px;
}

/* Canvas sits exactly over the video; pointer-events off so clicks
   (and any future video controls) pass through to the video element. */
.canvas-overlay {
  position: absolute;
  top: 0;
  left: 0;
  pointer-events: none;
}

.controls {
  margin: 20px 0;
}

.controls button {
  margin-right: 10px;
  padding: 10px 15px;
  font-size: 14px;
  cursor: pointer;
  border: 1px solid #aaa;
  border-radius: 4px;
  background: #f9f9f9;
}

.controls button:disabled {
  opacity: 0.5;
  cursor: not-allowed;
}

/* Results panel listing one row per detected hand. */
.gesture-info {
  margin-top: 20px;
  padding: 15px;
  background: #f0f8ff;
  border-radius: 8px;
}

/* Row layout: centered via margin-left + translateX trick; columns sized
   with flex ratios 3/4/3 to match the three spans below. */
.gesture-item {
  margin-left: 50%;
  transform: translateX(-50%);
  font-size: 14px;
  display: flex;
}

.hand-label {
  font-weight: bold;
  flex: 3;
}

.gesture-name {
  color: #007bff;
  font-weight: bold;
  flex: 4;
}

.handedness-name {
  color: red;
  font-weight: bold;
  flex: 3;
}

/* NOTE(review): .loading has no matching element in the current template —
   presumably intended for an isLoading indicator; verify before removing. */
.loading {
  margin-top: 20px;
  font-style: italic;
  color: #666;
}
</style>