<template>
  <!-- AI chat panel -->
  <transition name="ai-chat">
    <div
      v-if="visible"
      class="ai-chat-panel"
      :class="{ 'ai-chat-fullscreen': isFullscreen }"
      :style="panelStyle"
      @mousedown="handlePanelMouseDown"
    >
      <!-- Header: avatar, connection status, fullscreen/close controls -->
      <div class="ai-chat-header">
        <div class="header-info">
          <div class="ai-avatar-small">
            <el-icon :size="20"><MagicStick /></el-icon>
          </div>
          <div>
            <div class="ai-title">AI助手</div>
            <div class="ai-status" :class="{ 'status-connected': connected, 'status-disconnected': !connected }">
              {{ connected ? '在线' : '离线' }}
            </div>
          </div>
        </div>
        <div class="header-actions">
          <button class="header-btn" @click="toggleFullscreen" :title="isFullscreen ? '退出全屏' : '全屏'">
            <el-icon v-if="!isFullscreen" :size="18"><FullScreen /></el-icon>
            <el-icon v-else :size="18"><CopyDocument /></el-icon>
          </button>
          <button class="header-btn" @click="$emit('close')" title="关闭">
            <el-icon :size="18"><Close /></el-icon>
          </button>
        </div>
      </div>

      <!-- Message list -->
      <div class="ai-chat-messages" ref="messagesRef">
        <div
          v-for="(msg, index) in messages"
          :key="index"
          class="ai-message"
          :class="msg.role"
        >
          <!-- Avatar shown only for assistant / reasoning messages -->
          <div v-if="msg.role === 'assistant' || msg.role === 'reasoning'" class="msg-avatar">
            <el-icon v-if="msg.role === 'assistant'"><MagicStick /></el-icon>
            <el-icon v-else><TrendCharts /></el-icon>
          </div>
          <div class="msg-content">
            <div class="msg-header">
              <!-- NOTE(review): non-assistant content is injected via v-html without
                   sanitization — confirm user/reasoning content is trusted -->
              <div
                class="msg-text"
                :class="{ 'reasoning-text': msg.role === 'reasoning', 'markdown-content': msg.role === 'assistant' }"
                v-html="msg.role === 'assistant' ? renderMarkdown(msg.content) : msg.content"
              ></div>
              <!-- Read-aloud toggle for AI-generated messages -->
              <button
                v-if="msg.role === 'assistant' || msg.role === 'reasoning'"
                class="msg-speak-btn"
                @click="toggleSpeak(msg.content, index)"
                :class="{ 'speaking': currentSpeakingIndex === index }"
              >
                <el-icon v-if="currentSpeakingIndex === index" :class="{ 'voice-pulse': true }">
                  <Microphone />
                </el-icon>
                <el-icon v-else>
                  <VideoPlay />
                </el-icon>
              </button>
            </div>
            <!-- NOTE(review): this renders the time of the last repaint, not the
                 message's own time — needs a per-message timestamp in the data -->
            <div class="msg-time">{{ formatMessageTime(Date.now()) }}</div>
          </div>
        </div>
      </div>

      <!-- Input row: mic toggle + text field with a send button in the suffix slot -->
      <div class="ai-chat-input">
        <div class="input-wrapper">
          <el-button
            circle
            :type="isVoiceRecording ? 'danger' : 'default'"
            :class="{ 'voice-active': isVoiceRecording }"
            @click="toggleVoiceInput"
            class="voice-input-btn"
          >
            <el-icon :class="{ 'voice-pulse': isVoiceRecording }">
              <Microphone />
            </el-icon>
          </el-button>
          <el-input
            v-model="input"
            placeholder="输入消息，或点击麦克风语音输入..."
            @keyup.enter="handleSend"
            class="ai-input-field"
          >
            <template #suffix>
              <el-button text @click="handleSend" :disabled="!input.trim() || loading" :loading="loading">
                <el-icon><Promotion /></el-icon>
              </el-button>
            </template>
          </el-input>
        </div>
      </div>

      <!-- Resize handle (hidden in fullscreen) -->
      <div
        v-if="!isFullscreen"
        class="ai-chat-resize-handle"
        @mousedown.stop="handleResizeStart"
      ></div>
    </div>
  </transition>

  <!-- Transparent overlay: clicking outside the panel closes it -->
  <div v-if="visible" class="ai-chat-overlay" @click="$emit('close')"></div>

  <!-- Voice-recognition overlay -->
  <transition name="voice-overlay">
    <div v-if="isVoiceRecording" class="voice-recognition-overlay">
      <div class="voice-recognition-panel">
        <!-- Animation area -->
        <div class="voice-animation-area">
          <!-- Pulsing rings around the mic icon -->
          <div class="voice-circle-container">
            <div class="voice-circle voice-circle-outer"></div>
            <div class="voice-circle voice-circle-middle"></div>
            <div class="voice-circle voice-circle-inner">
              <el-icon :size="48" class="voice-mic-icon">
                <Microphone />
              </el-icon>
            </div>
          </div>

          <!-- Radial waveform bars -->
          <div class="voice-waves">
            <div class="voice-wave" v-for="i in 8" :key="i" :style="{ '--wave-index': i }"></div>
          </div>
        </div>

        <!-- Live transcript / status -->
        <div class="voice-status">
          <div class="status-text">{{ voiceTranscript || '正在聆听，请说话...' }}</div>
          <div class="status-hint">
            <el-tag size="small" effect="plain">中文识别</el-tag>
            <el-tag size="small" effect="plain" type="success">实时转换</el-tag>
          </div>
        </div>

        <!-- Stop button + usage hint -->
        <div class="voice-actions">
          <el-button type="danger" round @click="stopVoiceInput">
            <el-icon><Close /></el-icon>
            停止录音
          </el-button>
          <div class="voice-tip">
            <el-text size="small" type="info">
              说完后自动发送，或点击停止按钮
            </el-text>
          </div>
        </div>
      </div>
    </div>
  </transition>
</template>

<script setup>
import { ref, watch, nextTick, onMounted, onUnmounted, computed } from 'vue'
import { ElMessage } from 'element-plus'
import { MagicStick, Close, Microphone, Promotion, QuestionFilled, TrendCharts, FullScreen, CopyDocument, VideoPlay } from '@element-plus/icons-vue'
import { createAgentAi } from '@/uses/agentAi'
import { marked } from 'marked'
import DOMPurify from 'dompurify'

// Panel visibility is controlled by the parent component.
const props = defineProps({
  visible: {
    type: Boolean,
    default: false
  }
})

// Emitted when the user dismisses the panel (overlay click / close button).
const emit = defineEmits(['close'])

// Initialize the AI assistant session (WebSocket transport, reactive message list).
// NOTE(review): `ws` and `agentModel` are destructured but never used in this file — confirm before removing.
const { ws, sendMessage, agentModel, connected, messages, loading } = createAgentAi()

// --- Local UI state ---
const input = ref('')                 // text currently in the input field
const messagesRef = ref(null)         // DOM ref of the scrollable message list
const isVoiceRecording = ref(false)   // true while the mic overlay is active
const voiceRecognition = ref(null)    // SpeechRecognition instance (null if unsupported)
const isVoiceSupported = ref(false)   // browser supports the Web Speech API
const voiceTranscript = ref('')       // live transcript shown in the overlay

// --- Text-to-speech state ---
const isSpeaking = ref(false)         // an utterance is currently playing
const currentSpeakingIndex = ref(-1)  // index of the message being read (-1 = none)
const speechSynthesis = ref(null)     // window.speechSynthesis handle (null if unsupported)

// --- Window geometry state ---
const isFullscreen = ref(false)
const panelWidth = ref(400)           // px; clamped to [300, 800] while resizing
const panelHeight = ref(600)          // px; clamped to [400, viewport-100] while resizing
const panelX = ref(null)              // drag offsets captured on header mousedown
const panelY = ref(null)              //   (currently unused by panelStyle — drag is a stub)
let isDragging = false
let isResizing = false
let resizeStartX = 0
let resizeStartY = 0
let resizeStartWidth = 0
let resizeStartHeight = 0

// Inline style for the chat panel: fullscreen pins it to the whole viewport,
// otherwise the user-resizable width/height apply.
const panelStyle = computed(() =>
  isFullscreen.value
    ? { width: '100vw', height: '100vh', top: '0', left: '0', bottom: '0', right: '0' }
    : { width: `${panelWidth.value}px`, height: `${panelHeight.value}px` }
)

// Format a timestamp (ms since epoch) as HH:mm using the zh-CN locale.
const formatMessageTime = (timestamp) =>
  new Date(timestamp).toLocaleTimeString('zh-CN', { hour: '2-digit', minute: '2-digit' })

// Convert Markdown to sanitized HTML; falls back to the raw text if parsing fails.
const renderMarkdown = (text) => {
  if (!text) return ''
  try {
    return DOMPurify.sanitize(marked.parse(text))
  } catch (error) {
    console.error('Markdown渲染失败:', error)
    return text
  }
}

// Scroll the message list to its newest entry after the next DOM update.
const scrollToBottom = () => {
  nextTick(() => {
    const list = messagesRef.value
    if (list) list.scrollTop = list.scrollHeight
  })
}

// Send the trimmed input text to the AI over the WebSocket.
// Fix: previously the input box was cleared (and `loading` toggled on/off)
// even when the socket was disconnected, silently discarding the user's
// draft. The connection is now checked first, so a failed send keeps the
// text in the input field for a retry.
const handleSend = () => {
  if (!input.value.trim() || loading.value) return

  if (!connected.value) {
    ElMessage.warning('AI助手未连接，请稍候再试')
    return
  }

  const userMessage = input.value.trim()
  input.value = ''
  loading.value = true
  sendMessage(userMessage)
}

// Auto-scroll whenever the message list changes (deep: message content streams in).
watch(() => messages.value, () => {
  scrollToBottom()
}, { deep: true })

// Scroll once more when a response finishes loading.
watch(() => loading.value, (isLoading) => {
  if (!isLoading) {
    scrollToBottom()
  }
})

// Set up the Web Speech recognition API (webkit prefix covers Chrome/Edge).
// Configures zh-CN single-shot recognition with interim results and wires the
// result / start / end / error handlers that drive the overlay UI state.
const initVoiceRecognition = () => {
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition

  if (!SpeechRecognition) {
    console.warn('当前浏览器不支持语音识别')
    isVoiceSupported.value = false
    return
  }

  isVoiceSupported.value = true
  voiceRecognition.value = new SpeechRecognition()

  // Recognition settings: Mandarin, stop after one utterance, stream interim text.
  voiceRecognition.value.lang = 'zh-CN'
  voiceRecognition.value.continuous = false
  voiceRecognition.value.interimResults = true
  voiceRecognition.value.maxAlternatives = 1

  // Result handler: interim text updates the overlay live; the final result is
  // copied into the input box and auto-sent after short delays (800 ms so the
  // user can see the recognized text, then 500 ms before sending).
  voiceRecognition.value.onresult = (event) => {
    const result = event.results[event.results.length - 1]
    const transcript = result[0].transcript

    if (result.isFinal) {
      // Final result
      voiceTranscript.value = transcript

      setTimeout(() => {
        isVoiceRecording.value = false
        input.value = transcript
        voiceTranscript.value = ''

        // Auto-send the recognized message
        setTimeout(() => {
          handleSend()
        }, 500)
      }, 800)
    } else {
      // Interim (streaming) result
      voiceTranscript.value = transcript
    }
  }

  // Recognition started: clear any stale transcript.
  voiceRecognition.value.onstart = () => {
    console.log('语音识别已开始')
    voiceTranscript.value = ''
  }

  // Recognition ended: if it stopped while still "recording" with no text,
  // nothing was captured — reset the flag and warn the user.
  voiceRecognition.value.onend = () => {
    console.log('语音识别已结束')
    if (isVoiceRecording.value && !voiceTranscript.value) {
      // Nothing was recognized
      isVoiceRecording.value = false
      ElMessage.warning('未识别到语音，请重试')
    }
  }

  // Recognition error: map known error codes to user-facing messages;
  // 'aborted' (user cancelled) stays silent.
  voiceRecognition.value.onerror = (event) => {
    console.error('语音识别错误:', event.error)
    isVoiceRecording.value = false
    voiceTranscript.value = ''

    const errorMessages = {
      'no-speech': '未检测到语音，请重试',
      'audio-capture': '无法访问麦克风，请检查权限',
      'not-allowed': '麦克风权限被拒绝，请在浏览器设置中允许',
      'network': '网络错误，请检查网络连接',
      'aborted': '语音识别已取消'
    }

    const message = errorMessages[event.error] || '语音识别失败，请重试'

    if (event.error !== 'aborted') {
      ElMessage.error(message)
    }
  }
}

// Begin a speech-recognition session: flag recording, reset the transcript,
// and surface a user-facing error if the engine refuses to start.
const startVoiceInput = () => {
  if (!voiceRecognition.value) return
  isVoiceRecording.value = true
  voiceTranscript.value = ''
  try {
    voiceRecognition.value.start()
  } catch (error) {
    console.error('语音识别启动失败:', error)
    ElMessage.error('语音识别启动失败，请稍后重试')
    isVoiceRecording.value = false
  }
}

// Stop an in-progress recognition session and clear the live transcript.
// No-op when nothing is recording.
const stopVoiceInput = () => {
  if (!voiceRecognition.value || !isVoiceRecording.value) return
  voiceRecognition.value.stop()
  isVoiceRecording.value = false
  voiceTranscript.value = ''
}

// Mic button handler: warn on unsupported browsers, otherwise start/stop recording.
const toggleVoiceInput = () => {
  if (!isVoiceSupported.value) {
    ElMessage.warning('您的浏览器不支持语音识别，请使用Chrome或Edge浏览器')
    return
  }
  isVoiceRecording.value ? stopVoiceInput() : startVoiceInput()
}

// Cache the browser text-to-speech handle when the API is available;
// otherwise leave the ref null (callers check before speaking).
const initSpeechSynthesis = () => {
  if (!('speechSynthesis' in window)) return
  speechSynthesis.value = window.speechSynthesis
}

// Read a message aloud with the Web Speech synthesis API.
// Cancels any utterance already playing, flattens the content to plain text,
// and tracks which message index is being spoken for the UI highlight.
const startSpeak = (text, index) => {
  const synth = speechSynthesis.value
  if (!synth) {
    ElMessage.warning('您的浏览器不支持语音朗读')
    return
  }

  // Only one utterance at a time.
  if (isSpeaking.value) synth.cancel()

  const speech = new SpeechSynthesisUtterance(stripMarkdown(text))
  speech.lang = 'zh-CN'
  speech.rate = 1.0   // speed
  speech.pitch = 1.0  // pitch
  speech.volume = 1.0 // volume

  // Keep the reactive speaking state in sync with playback.
  speech.onstart = () => {
    isSpeaking.value = true
    currentSpeakingIndex.value = index
  }
  speech.onend = () => {
    isSpeaking.value = false
    currentSpeakingIndex.value = -1
  }
  speech.onerror = (error) => {
    console.error('语音朗读错误:', error)
    isSpeaking.value = false
    currentSpeakingIndex.value = -1
    ElMessage.error('语音朗读失败')
  }

  synth.speak(speech)
}

// Cancel any in-progress narration and reset the speaking state.
const stopSpeak = () => {
  if (!speechSynthesis.value || !isSpeaking.value) return
  speechSynthesis.value.cancel()
  isSpeaking.value = false
  currentSpeakingIndex.value = -1
}

// Toggle narration for message `index`: stop it if this message is playing,
// otherwise start reading it (pre-empting any other message).
const toggleSpeak = (text, index) => {
  const playingThisMessage = isSpeaking.value && currentSpeakingIndex.value === index
  playingThisMessage ? stopSpeak() : startSpeak(text, index)
}

// Extract plain, speakable text from a Markdown message.
// Fix: the original assigned the raw Markdown string straight to innerHTML,
// so it was parsed as HTML rather than Markdown — syntax like **, #, and
// backticks survived into the spoken text, and unsanitized content reached
// the DOM. Render it through renderMarkdown (marked + DOMPurify) first, then
// flatten the resulting HTML to text.
const stripMarkdown = (markdownText) => {
  if (!markdownText) return ''

  // Parse the sanitized HTML in a detached element.
  const div = document.createElement('div')
  div.innerHTML = renderMarkdown(markdownText)

  // Unwrap code blocks and links so only their text content is announced.
  div.querySelectorAll('code, pre, a').forEach(el => {
    el.replaceWith(document.createTextNode(el.textContent))
  })

  return div.textContent || div.innerText || ''
}

// Toggle between fullscreen and floating-panel layout.
const toggleFullscreen = () => {
  isFullscreen.value = !isFullscreen.value
}

// Mouse-down on the panel: when the press lands on the header (and the panel
// is not fullscreen), capture the pointer offset and install move/up listeners.
// NOTE(review): the mousemove handler is a stub — panelX/panelY are recorded
// but the panel position is never updated, so dragging has no visible effect yet.
const handlePanelMouseDown = (event) => {
  if (event.target.closest('.ai-chat-header') && !isFullscreen.value) {
    isDragging = true
    const rect = event.currentTarget.getBoundingClientRect()
    panelX.value = event.clientX - rect.left
    panelY.value = event.clientY - rect.top
    
    const onMouseMove = (e) => {
      if (!isDragging) return
      // Drag repositioning is intentionally not implemented yet.
    }
    
    const onMouseUp = () => {
      isDragging = false
      document.removeEventListener('mousemove', onMouseMove)
      document.removeEventListener('mouseup', onMouseUp)
    }
    
    document.addEventListener('mousemove', onMouseMove)
    document.addEventListener('mouseup', onMouseUp)
  }
}

// Begin a bottom-right resize drag. While the mouse is held, the panel width
// is clamped to [300, 800] px and its height to [400, viewport - 100] px;
// listeners are removed on mouse-up.
const handleResizeStart = (event) => {
  if (isFullscreen.value) return

  isResizing = true
  resizeStartX = event.clientX
  resizeStartY = event.clientY
  resizeStartWidth = panelWidth.value
  resizeStartHeight = panelHeight.value

  const onDrag = (moveEvent) => {
    if (!isResizing) return

    const dx = moveEvent.clientX - resizeStartX
    const dy = moveEvent.clientY - resizeStartY

    panelWidth.value = Math.min(800, Math.max(300, resizeStartWidth + dx))
    panelHeight.value = Math.min(window.innerHeight - 100, Math.max(400, resizeStartHeight + dy))
  }

  const onRelease = () => {
    isResizing = false
    document.removeEventListener('mousemove', onDrag)
    document.removeEventListener('mouseup', onRelease)
  }

  document.addEventListener('mousemove', onDrag)
  document.addEventListener('mouseup', onRelease)
}

// Initialize speech recognition + synthesis once the component is mounted.
onMounted(() => {
  initVoiceRecognition()
  initSpeechSynthesis()
  
  // Log connection-state changes. The watcher is registered inside onMounted,
  // so it is still bound to this component instance and disposed with it.
  watch(() => connected.value, (isConnected) => {
    if (isConnected) {
      console.log('AI助手已连接')
    } else {
      console.log('AI助手已断开')
    }
  })
})

// Stop any active recording/narration when the component is destroyed.
onUnmounted(() => {
  stopVoiceInput()
  stopSpeak()
})

// Methods exposed to the parent component.
defineExpose({
  scrollToBottom
})
</script>

<style scoped>
/* Floating chat panel; sized by the inline panelStyle binding. */
.ai-chat-panel {
  position: fixed;
  bottom: 60px;
  right: 12px;
  background: white;
  border-radius: 12px;
  box-shadow: 0 20px 60px rgba(0, 0, 0, 0.3);
  z-index: 1003;
  display: flex;
  flex-direction: column;
  overflow: hidden;
  cursor: default;
  user-select: none;
}

/* Fullscreen overrides (the !important beats the inline width/height). */
.ai-chat-panel.ai-chat-fullscreen {
  border-radius: 0;
  bottom: 0;
  right: 0;
  width: 100vw !important;
  height: 100vh !important;
  top: 0 !important;
  left: 0 !important;
}

.ai-chat-header {
  padding: 16px;
  background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
  display: flex;
  align-items: center;
  justify-content: space-between;
  color: white;
  cursor: move;
  flex-shrink: 0;
}

.header-actions {
  display: flex;
  align-items: center;
  gap: 4px;
}

.header-btn {
  width: 32px;
  height: 32px;
  display: flex;
  align-items: center;
  justify-content: center;
  background: rgba(255, 255, 255, 0.15);
  border: none;
  border-radius: 6px;
  color: white;
  cursor: pointer;
  transition: all 0.2s;
}

.header-btn:hover {
  background: rgba(255, 255, 255, 0.3);
  transform: scale(1.05);
}

.header-btn:active {
  transform: scale(0.95);
}

.ai-avatar-small {
  width: 36px;
  height: 36px;
  background: rgba(255, 255, 255, 0.2);
  border-radius: 50%;
  display: flex;
  align-items: center;
  justify-content: center;
  margin-right: 12px;
}

.header-info {
  display: flex;
  align-items: center;
}

.ai-title {
  font-size: 16px;
  font-weight: 600;
  margin-bottom: 2px;
}

.ai-status {
  font-size: 12px;
  opacity: 0.8;
}

/* Scrollable message list. */
.ai-chat-messages {
  flex: 1;
  overflow-y: auto;
  padding: 16px;
  display: flex;
  flex-direction: column;
  gap: 12px;
  background: #f5f7fa;
}

.ai-message {
  display: flex;
  gap: 8px;
  align-items: flex-start;
}

/* User messages are right-aligned by reversing the row. */
.ai-message.user {
  flex-direction: row-reverse;
}

.msg-avatar {
  width: 32px;
  height: 32px;
  background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
  border-radius: 50%;
  display: flex;
  align-items: center;
  justify-content: center;
  color: white;
  flex-shrink: 0;
}

.ai-message.reasoning .msg-avatar {
  background: linear-gradient(135deg, #ff9a56 0%, #ff6a88 100%);
}

.msg-content {
  max-width: 70%;
}

.ai-message.user .msg-content {
  text-align: right;
}

.msg-header {
  display: flex;
  align-items: flex-start;
  gap: 8px;
}

.msg-text {
  background: white;
  padding: 10px 14px;
  border-radius: 12px;
  font-size: 14px;
  line-height: 1.5;
  box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
  flex: 1;
  min-width: 0;
}

/* Read-aloud button next to AI messages. */
.msg-speak-btn {
  width: 28px;
  height: 28px;
  display: flex;
  align-items: center;
  justify-content: center;
  background: rgba(255, 255, 255, 0.9);
  border: 1px solid rgba(0, 0, 0, 0.1);
  border-radius: 6px;
  color: #409eff;
  cursor: pointer;
  transition: all 0.2s;
  flex-shrink: 0;
  margin-top: 4px;
}

.msg-speak-btn:hover {
  background: #409eff;
  color: white;
  transform: scale(1.05);
}

.msg-speak-btn.speaking {
  background: #67c23a;
  color: white;
  border-color: #67c23a;
  animation: speak-pulse 1.5s infinite;
}

@keyframes speak-pulse {
  0%, 100% {
    box-shadow: 0 0 0 0 rgba(103, 194, 58, 0.7);
  }
  50% {
    box-shadow: 0 0 0 8px rgba(103, 194, 58, 0);
  }
}

.ai-message.user .msg-text {
  background: #0078d4;
  color: white;
}

/* Amber styling for intermediate "reasoning" messages. */
.reasoning-text {
  background: linear-gradient(135deg, #fef3c7 0%, #fde68a 100%);
  border-left: 3px solid #f59e0b;
  font-style: italic;
  color: #78350f;
}

/* Rendered-Markdown typography (applies to v-html output, hence :deep). */
.markdown-content {
  word-wrap: break-word;
  line-height: 1.6;
}

.markdown-content :deep(h1),
.markdown-content :deep(h2),
.markdown-content :deep(h3),
.markdown-content :deep(h4),
.markdown-content :deep(h5),
.markdown-content :deep(h6) {
  margin: 16px 0 8px 0;
  font-weight: 600;
  line-height: 1.25;
}

.markdown-content :deep(h1) { font-size: 1.75em; border-bottom: 1px solid #eaecef; padding-bottom: 0.3em; }
.markdown-content :deep(h2) { font-size: 1.5em; border-bottom: 1px solid #eaecef; padding-bottom: 0.3em; }
.markdown-content :deep(h3) { font-size: 1.25em; }
.markdown-content :deep(h4) { font-size: 1em; }
.markdown-content :deep(h5) { font-size: 0.875em; }
.markdown-content :deep(h6) { font-size: 0.85em; color: #6a737d; }

.markdown-content :deep(p) {
  margin: 8px 0;
}

.markdown-content :deep(a) {
  color: #0366d6;
  text-decoration: none;
}

.markdown-content :deep(a:hover) {
  text-decoration: underline;
}

.markdown-content :deep(code) {
  padding: 2px 6px;
  background: #f6f8fa;
  border-radius: 3px;
  font-family: 'Courier New', Courier, monospace;
  font-size: 0.9em;
  color: #e83e8c;
}

.markdown-content :deep(pre) {
  padding: 12px;
  background: #f6f8fa;
  border-radius: 6px;
  overflow-x: auto;
  margin: 8px 0;
}

.markdown-content :deep(pre code) {
  padding: 0;
  background: transparent;
  color: inherit;
  font-size: 0.9em;
}

.markdown-content :deep(blockquote) {
  padding: 0 1em;
  color: #6a737d;
  border-left: 4px solid #dfe2e5;
  margin: 8px 0;
}

.markdown-content :deep(ul),
.markdown-content :deep(ol) {
  margin: 8px 0;
  padding-left: 2em;
}

.markdown-content :deep(li) {
  margin: 4px 0;
}

.markdown-content :deep(table) {
  border-collapse: collapse;
  margin: 12px 0;
  width: 100%;
}

.markdown-content :deep(th),
.markdown-content :deep(td) {
  border: 1px solid #dfe2e5;
  padding: 6px 13px;
}

.markdown-content :deep(th) {
  background: #f6f8fa;
  font-weight: 600;
}

.markdown-content :deep(img) {
  max-width: 100%;
  height: auto;
  border-radius: 6px;
  margin: 8px 0;
}

.markdown-content :deep(hr) {
  border: none;
  border-top: 1px solid #eaecef;
  margin: 16px 0;
}

.msg-time {
  font-size: 11px;
  color: #8a8886;
  margin-top: 4px;
  padding: 0 4px;
}

.ai-chat-input {
  padding: 16px;
  border-top: 1px solid #e4e7ed;
  background: white;
  flex-shrink: 0;
}

/* Bottom-right drag handle.
   NOTE(review): `se-resize` (or `nwse-resize`) would match a bottom-right
   handle better than `nw-resize` — confirm intent. */
.ai-chat-resize-handle {
  position: absolute;
  bottom: 0;
  right: 0;
  width: 20px;
  height: 20px;
  cursor: nw-resize;
  background: linear-gradient(135deg, transparent 50%, rgba(0, 0, 0, 0.1) 50%);
  z-index: 10;
}

.input-wrapper {
  display: flex;
  align-items: center;
  gap: 12px;
}

.voice-input-btn {
  flex-shrink: 0;
  position: relative;
}

.voice-input-btn.voice-active {
  animation: voice-btn-pulse 1.5s infinite;
}

@keyframes voice-btn-pulse {
  0%, 100% {
    transform: scale(1);
    box-shadow: 0 0 0 0 rgba(245, 108, 108, 0.7);
  }
  50% {
    transform: scale(1.05);
    box-shadow: 0 0 0 10px rgba(245, 108, 108, 0);
  }
}

.voice-pulse {
  animation: icon-pulse 1s infinite;
}

@keyframes icon-pulse {
  0%, 100% { transform: scale(1); }
  50% { transform: scale(1.2); }
}

.ai-input-field {
  flex: 1;
}

/* Invisible click-away layer, just below the panel (1002 < 1003). */
.ai-chat-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  background: transparent;
  z-index: 1002;
}

/* Voice-recognition overlay */
.voice-recognition-overlay {
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 48px;
  background: rgba(0, 0, 0, 0.85);
  backdrop-filter: blur(20px);
  display: flex;
  align-items: center;
  justify-content: center;
  z-index: 2000;
}

.voice-recognition-panel {
  text-align: center;
  max-width: 600px;
  padding: 60px 40px;
}

/* Voice animation area */
.voice-animation-area {
  position: relative;
  width: 300px;
  height: 300px;
  margin: 0 auto 40px;
}

.voice-circle-container {
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
}

.voice-circle {
  position: absolute;
  border-radius: 50%;
  display: flex;
  align-items: center;
  justify-content: center;
}

.voice-circle-outer {
  width: 280px;
  height: 280px;
  border: 3px solid rgba(102, 126, 234, 0.3);
  animation: circle-pulse-outer 3s ease-in-out infinite;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
}

.voice-circle-middle {
  width: 200px;
  height: 200px;
  border: 3px solid rgba(102, 126, 234, 0.5);
  animation: circle-pulse-middle 2s ease-in-out infinite;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
}

.voice-circle-inner {
  width: 120px;
  height: 120px;
  background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
  box-shadow: 0 10px 40px rgba(102, 126, 234, 0.5);
  animation: circle-pulse-inner 1.5s ease-in-out infinite;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
}

.voice-mic-icon {
  color: white;
  animation: mic-bounce 1s ease-in-out infinite;
}

@keyframes circle-pulse-outer {
  0%, 100% {
    transform: translate(-50%, -50%) scale(1);
    opacity: 0.3;
  }
  50% {
    transform: translate(-50%, -50%) scale(1.1);
    opacity: 0.6;
  }
}

@keyframes circle-pulse-middle {
  0%, 100% {
    transform: translate(-50%, -50%) scale(1);
    opacity: 0.5;
  }
  50% {
    transform: translate(-50%, -50%) scale(1.15);
    opacity: 0.8;
  }
}

@keyframes circle-pulse-inner {
  0%, 100% {
    transform: translate(-50%, -50%) scale(1);
    box-shadow: 0 10px 40px rgba(102, 126, 234, 0.5);
  }
  50% {
    transform: translate(-50%, -50%) scale(1.05);
    box-shadow: 0 15px 50px rgba(102, 126, 234, 0.8);
  }
}

@keyframes mic-bounce {
  0%, 100% { transform: translateY(0); }
  50% { transform: translateY(-5px); }
}

/* Radial waveform: 8 bars rotated in 45° steps around the rings. */
.voice-waves {
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
  width: 100%;
  height: 100%;
  pointer-events: none;
}

.voice-wave {
  position: absolute;
  width: 4px;
  height: 60px;
  background: linear-gradient(180deg, #667eea 0%, #764ba2 100%);
  border-radius: 2px;
  top: 50%;
  left: 50%;
  animation: wave-animation 1.2s ease-in-out infinite;
  opacity: 0.8;
}

.voice-wave:nth-child(1) {
  transform: translate(-50%, -50%) rotate(0deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 1) * 0.1s);
}
.voice-wave:nth-child(2) {
  transform: translate(-50%, -50%) rotate(45deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 2) * 0.1s);
}
.voice-wave:nth-child(3) {
  transform: translate(-50%, -50%) rotate(90deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 3) * 0.1s);
}
.voice-wave:nth-child(4) {
  transform: translate(-50%, -50%) rotate(135deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 4) * 0.1s);
}
.voice-wave:nth-child(5) {
  transform: translate(-50%, -50%) rotate(180deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 5) * 0.1s);
}
.voice-wave:nth-child(6) {
  transform: translate(-50%, -50%) rotate(225deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 6) * 0.1s);
}
.voice-wave:nth-child(7) {
  transform: translate(-50%, -50%) rotate(270deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 7) * 0.1s);
}
.voice-wave:nth-child(8) {
  transform: translate(-50%, -50%) rotate(315deg) translateY(-140px);
  animation-delay: calc(var(--wave-index, 8) * 0.1s);
}

@keyframes wave-animation {
  0%, 100% {
    height: 40px;
    opacity: 0.4;
  }
  50% {
    height: 80px;
    opacity: 1;
  }
}

/* Voice status display */
.voice-status {
  margin-bottom: 32px;
}

.status-text {
  font-size: 24px;
  font-weight: 500;
  color: white;
  margin-bottom: 16px;
  min-height: 32px;
  text-shadow: 0 2px 8px rgba(0, 0, 0, 0.3);
}

.status-hint {
  display: flex;
  justify-content: center;
  gap: 12px;
}

/* Voice action buttons */
.voice-actions {
  display: flex;
  flex-direction: column;
  align-items: center;
  gap: 16px;
}

.voice-tip {
  color: rgba(255, 255, 255, 0.7);
}

/* Panel / overlay enter-leave transitions */
.ai-chat-enter-active,
.ai-chat-leave-active {
  transition: all 0.3s ease;
}

.ai-chat-enter-from {
  opacity: 0;
  transform: translateY(20px) scale(0.95);
}

.ai-chat-leave-to {
  opacity: 0;
  transform: translateY(20px) scale(0.95);
}

.voice-overlay-enter-active,
.voice-overlay-leave-active {
  transition: all 0.4s ease;
}

.voice-overlay-enter-from {
  opacity: 0;
}

.voice-overlay-enter-from .voice-recognition-panel {
  transform: scale(0.8);
}

.voice-overlay-leave-to {
  opacity: 0;
}

.voice-overlay-leave-to .voice-recognition-panel {
  transform: scale(0.8);
}
</style>

