<template>
  <view class="chat-page">
    <!-- Top bar: avatar on the left, title in the middle, voice switch on the right -->
    <view class="page-header">
      <view class="header-avatar">
        <image src="/static/avatar/user.png" class="user-avatar-icon" mode="widthFix" />
      </view>
      <view class="header-title">聊天</view>
      <view class="header-right">
        <button class="global-voice-btn" @click="toggleGlobalVoice">
          <image
              :src="isVoiceGlobalOn ? '/static/icons/speaker-on.png' : '/static/icons/speaker-off.png'"
              class="global-voice-icon"
              mode="widthFix"
          />
        </button>
      </view>
    </view>

    <!-- Chat container -->
    <view class="chat-container">
      <!-- Message list: flexible height + internal scrolling -->
      <view class="chat-messages" ref="messageContainer">
        <view
            v-for="(msg, index) in messages"
            :key="msg.id"
            :class="['message-item', { 'user-message': msg.isUser, 'ai-message': !msg.isUser }]"
        >
          <view class="avatar">
            <image :src="msg.isUser ? userAvatar : aiAvatar" mode="widthFix" />
          </view>
          <view class="message-bubble">
            <!-- NOTE(review): v-html injects markdown-it output; markdown-it keeps raw
                 HTML disabled by default, but confirm no option enables it upstream,
                 otherwise assistant-controlled content becomes an XSS vector. -->
            <view class="message-content" v-html="parseMarkdown(msg.content)"></view>
            <view class="message-time">{{ formatTime(msg.timestamp) }}</view>
            <button
                class="message-voice-btn"
                v-if="!msg.isUser"
                @click="playMessageVoice(msg.content)"
            >
              <image src="/static/icons/voice-play.png" class="message-voice-icon" mode="widthFix" />
            </button>
          </view>
          <!-- (the voice playback button for AI messages lives inside .message-bubble above) -->

        </view>
      </view>

      <!-- Input area: pinned to the bottom, high visibility -->
      <view class="chat-input">
        <textarea
            v-model="inputValue"
            class="input-text"
            placeholder="输入消息（支持 Markdown，回车换行，点击发送提交）"
            @keydown.enter="handleEnter"
        ></textarea>
        <button class="send-btn" @click="sendMessage">发送</button>
        <label class="upload-btn">
          上传图片
          <input
              type="file"
              accept="image/*"
              multiple
              @change="handleImageUpload"
              style="display: none;"
          />
        </label>
        <view class="voice-container" :class="{ 'recording': isRecording }">
          <button
              class="voice-btn"
              :disabled="isUploading"
              @touchstart="handleVoiceTouchStart"
              @touchmove="handleVoiceTouchMove"
              @touchend="handleVoiceTouchEnd"
              @touchcancel="handleVoiceTouchCancel"
              @mousedown="handleVoiceMouseDown"
              @mouseup="handleVoiceMouseUp"
              @mousemove="handleGlobalMouseMove"
          >
            <view class="voice-tip" v-if="isRecording">
              <view class="recording-indicator">
                <view class="audio-wave" :class="{ 'animate': isRecording }">
                  <span></span><span></span><span></span><span></span><span></span>
                </view>
              </view>
              <view class="tip-text" :style="{ color: cancelTip ? '#ff4d4f' : '#fff' }">
                {{ cancelTip ? '上滑/离开取消' : '按住录音，松开发送' }}
              </view>
            </view>
            <view v-else>
              <svg
                  class="mic-icon"
                  viewBox="0 0 24 24"
                  fill="none"
                  stroke="currentColor"
                  stroke-width="2"
                  stroke-linecap="round"
                  stroke-linejoin="round"
              >
                <path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z" />
                <path d="M19 10v2a7 7 0 0 1-14 0v-2" />
                <line x1="12" y1="19" x2="12" y2="23" />
                <line x1="8" y1="23" x2="16" y2="23" />
              </svg>
            </view>
          </button>
        </view>
      </view>
    </view>
  </view>
</template>

<script setup>
import { ref, onMounted, nextTick, onUnmounted } from 'vue';
import markdownIt from 'markdown-it';
import request from '@/utils/request.js'; // 引入封装的请求工具

/* ========== Core reactive state & utility refs ========== */
// Chat message list; each item: { id, content, isUser, timestamp, conversationId? }
const messages = ref([]);
// Composer textarea model (bound via v-model)
const inputValue = ref('');
// Avatar image paths for the user and the assistant
const userAvatar = ref('/static/avatar/user.png');
const aiAvatar = ref('/static/avatar/ai.png');
// Scroll container of the message list (used by scrollToBottom)
const messageContainer = ref(null);
// Accumulated assistant reply text (kept for voice broadcast)
const aiRepliesText = ref('');
// Global auto-TTS switch for assistant replies (off by default)
const isVoiceGlobalOn = ref(false);

// Build a reasonably unique message id from the clock plus a random suffix.
// NOTE(review): not collision-proof — two calls in the same millisecond collide
// with ~1/10000 odds; fine for UI list keys, not for server-side identity.
const generateUniqueId = () => `msg_${Date.now()}_${Math.floor(Math.random() * 10000)}`;
// Markdown renderer for message bodies (default options).
const md = new markdownIt();
// Render Markdown to HTML for v-html. NOTE(review): markdown-it disables raw
// HTML by default — verify no option enables it, else this is an XSS vector.
const parseMarkdown = (content) => md.render(content);
// Format a millisecond timestamp as "HH:MM:SS" in local time.
const formatTime = (timestamp) => {
  const d = new Date(timestamp);
  const pad = (n) => String(n).padStart(2, '0');
  return [d.getHours(), d.getMinutes(), d.getSeconds()].map(pad).join(':');
};
// Pin the message list's scroll position to the newest message.
const scrollToBottom = () => {
  const el = messageContainer.value;
  if (!el) return;
  el.scrollTop = el.scrollHeight;
};

/* ========== WebSocket connection & message handling ========== */
// NOTE(review): hard-coded LAN address — move to config/env before release.
// Also no onerror/onclose handlers: a dropped connection fails silently.
const socket = new WebSocket('ws://192.168.3.31:8080/agent/chat/say');

// Handles streamed assistant replies. Expected payload per frame:
// { content, role, conversationId } — chunks of one reply share a conversationId.
socket.addEventListener('message', async (event) => {
  try {
    const data = JSON.parse(event.data);
    const { content, role, conversationId } = data;
    if (role !== 'assistant' || !content) return;

    // Aggregate streamed chunks into a single bubble keyed by conversationId.
    let aiMessage = messages.value.find(msg => !msg.isUser && msg.conversationId === conversationId);
    if (!aiMessage) {
      // First chunk: create the assistant bubble.
      aiMessage = {
        id: generateUniqueId(),
        content,
        isUser: false,
        timestamp: Date.now(),
        conversationId
      };
      messages.value.push(aiMessage);
    } else {
      // Follow-up chunk: append and replace the array so Vue re-renders.
      aiMessage.content += content;
      messages.value = [...messages.value];
    }

    aiRepliesText.value += content;
    await nextTick();
    scrollToBottom();

    // When the global voice switch is on, auto-play this chunk as speech.
    // NOTE(review): this synthesizes each streamed chunk separately, so the
    // auto-playback is fragmented — consider batching until the reply ends.
    if (isVoiceGlobalOn.value) {
      await playMessageVoice(content);
    }
  } catch (error) {
    // Malformed frame: surface a generic parse-failure bubble.
    messages.value.push({
      id: generateUniqueId(),
      content: '消息解析失败，请重试',
      isUser: false,
      timestamp: Date.now()
    });
    await nextTick();
    scrollToBottom();
  }
});

/* ========== Text messages & image upload ========== */
// Enter inserts a newline at the caret (Shift+Enter keeps the default
// behavior); submitting is done via the send button only.
const handleEnter = (e) => {
  if (e.shiftKey) return;
  e.preventDefault();
  const { selectionStart: start, selectionEnd: end } = e.target;
  const text = inputValue.value;
  inputValue.value = text.slice(0, start) + '\n' + text.slice(end);
  // Restore the caret right after the inserted newline once the DOM updates.
  nextTick(() => {
    e.target.selectionStart = e.target.selectionEnd = start + 1;
  });
};

// Send the trimmed textarea content over the WebSocket as a user message.
// Fix: socket.send() throws InvalidStateError while the socket is still
// CONNECTING and silently drops data once CLOSED — guard on readyState and
// surface a visible error instead.
const sendMessage = () => {
  const content = inputValue.value.trim();
  if (!content) return;

  if (socket.readyState !== WebSocket.OPEN) {
    messages.value.push({
      id: generateUniqueId(),
      content: '连接已断开，消息发送失败，请稍后重试',
      isUser: false,
      timestamp: Date.now()
    });
    scrollToBottom();
    return;
  }

  const messageId = generateUniqueId();
  // conversationId lets streamed assistant chunks aggregate into one bubble.
  const conversationId = generateUniqueId();
  const userMsg = {
    id: messageId,
    content,
    isUser: true,
    timestamp: Date.now(),
    conversationId
  };
  messages.value.push(userMsg);
  inputValue.value = '';
  socket.send(JSON.stringify({ content, role: 'user', conversationId }));
  scrollToBottom();
};

// Upload the selected images and echo the resulting URLs into the chat.
// Bug fix: `e.target.files` is a FileList, which has no .forEach — convert
// to a real array first. Also resets the input value so re-selecting the
// same file fires the change event again.
const handleImageUpload = async (e) => {
  const input = e.target;
  const files = Array.from(input.files || []);
  if (!files.length) return;

  const formData = new FormData();
  files.forEach(file => formData.append('files', file));

  try {
    // Backend upload endpoint: agent/file/upload — returns an array of URLs.
    const imageUrls = await request.post('agent/file/upload', formData, {
      'Content-Type': 'multipart/form-data'
    });

    messages.value.push({
      id: generateUniqueId(),
      content: `已上传图片：${imageUrls.join(', ')}`,
      isUser: true,
      timestamp: Date.now()
    });
    scrollToBottom();
  } catch (error) {
    messages.value.push({
      id: generateUniqueId(),
      content: '图片上传失败，请重试',
      isUser: true,
      timestamp: Date.now()
    });
    scrollToBottom();
  } finally {
    // Allow the same file to be picked again.
    input.value = '';
  }
};

/* ========== Voice interaction (record, upload, broadcast) ========== */
const isRecording = ref(false);            // press-and-hold recording in progress
const isUploading = ref(false);            // blob is being uploaded / transcribed
const mediaRecorder = ref(null);           // active MediaRecorder instance
const audioChunks = ref([]);               // data chunks collected while recording
const cancelTip = ref(false);              // show the "slide to cancel" hint
const touchStartY = ref(0);                // touch start Y, for slide-up cancel
const mouseStartPos = ref({ x: 0, y: 0 }); // mouse-down position, for drag cancel
const cancelThreshold = ref(30);           // px of movement that triggers cancel
const streamRef = ref(null);               // MediaStream from getUserMedia
const timeoutTimerRef = ref(null);         // 10 s auto-stop timer handle
const recordingPromise = ref(null);        // resolves with the final Blob on stop
const uploadMsgId = ref('');               // id of the "uploading..." placeholder

// Detach the document-level mouse listeners installed on mouse-down.
const cleanupGlobalMouseEvents = () => {
  const pairs = [
    ['mousemove', handleGlobalMouseMove],
    ['mouseup', handleVoiceMouseUp]
  ];
  for (const [evt, handler] of pairs) {
    document.removeEventListener(evt, handler);
  }
};

// Desktop: a left-button press starts recording and installs document-level
// listeners so dragging/releasing outside the button is still tracked.
const handleVoiceMouseDown = (e) => {
  const isLeftButton = e.button === 0;
  if (!isLeftButton || isRecording.value || isUploading.value) return;
  mouseStartPos.value = { x: e.clientX, y: e.clientY };
  startVoiceRecording();
  document.addEventListener('mousemove', handleGlobalMouseMove);
  document.addEventListener('mouseup', handleVoiceMouseUp);
};

// While recording with the mouse, show the cancel hint once the pointer
// drifts beyond the threshold on either axis.
const handleGlobalMouseMove = (e) => {
  if (!isRecording.value) return;
  const { x, y } = mouseStartPos.value;
  const movedFar =
      Math.abs(e.clientX - x) > cancelThreshold.value ||
      Math.abs(e.clientY - y) > cancelThreshold.value;
  cancelTip.value = movedFar;
};

// Mouse release: cancel when the pointer moved past the threshold, otherwise
// stop and upload the recording. Document listeners are always detached.
const handleVoiceMouseUp = (e) => {
  if (!isRecording.value) {
    cleanupGlobalMouseEvents();
    return;
  }
  const { x, y } = mouseStartPos.value;
  const movedFar =
      Math.abs(e.clientX - x) > cancelThreshold.value ||
      Math.abs(e.clientY - y) > cancelThreshold.value;
  if (movedFar) {
    cancelVoiceRecord();
  } else {
    handleVoiceRelease();
  }
  cleanupGlobalMouseEvents();
};

// Touch: remember the start Y so an upward swipe can cancel the recording.
const handleVoiceTouchStart = (e) => {
  if (isRecording.value || isUploading.value) return;
  const [firstTouch] = e.touches;
  touchStartY.value = firstTouch.clientY;
  startVoiceRecording();
};

// Show the cancel hint while the finger has slid upward past the threshold.
const handleVoiceTouchMove = (e) => {
  if (!isRecording.value) return;
  const slideUp = touchStartY.value - e.touches[0].clientY;
  cancelTip.value = slideUp > cancelThreshold.value;
};

// Finger lifted: cancel on a sufficient upward slide, otherwise finish the
// recording and upload it.
const handleVoiceTouchEnd = (e) => {
  if (!isRecording.value) return;
  const slideUp = touchStartY.value - e.changedTouches[0].clientY;
  if (slideUp > cancelThreshold.value) {
    cancelVoiceRecord();
  } else {
    handleVoiceRelease();
  }
};

// The system interrupted the touch (incoming call, gesture, …) — cancel.
const handleVoiceTouchCancel = () => {
  if (!isRecording.value) return;
  cancelVoiceRecord();
};

// Begin a press-and-hold recording: request the microphone, pick the first
// MediaRecorder mime type the browser accepts, and start collecting chunks.
// NOTE(review): the 10 s auto-stop timer starts BEFORE the permission prompt
// resolves, so time spent in the prompt eats into the recording window.
const startVoiceRecording = () => {
  isRecording.value = true;
  cancelTip.value = false;
  audioChunks.value = [];
  // Auto-stop after 10 s to keep uploads small.
  timeoutTimerRef.value = setTimeout(() => handleVoiceRelease(), 10000);

  // Mono 16 kHz with processing disabled — presumably to match the backend
  // speech-to-text expectations (see the sampleRate sent on upload).
  navigator.mediaDevices.getUserMedia({
    audio: {
      sampleRate: 16000,
      channelCount: 1,
      echoCancellation: false,
      noiseSuppression: false,
      autoGainControl: false
    }
  })
      .then((stream) => {
        streamRef.value = stream;
        // Candidate container/codec combos, tried in order.
        // NOTE(review): 'audio/opus' is likely unsupported as a MediaRecorder
        // container in major browsers — confirm; 'audio/webm;codecs=opus' is
        // the realistic match in Chromium.
        const tryFormats = ['audio/opus;codecs=opus', 'audio/webm;codecs=opus', 'audio/opus'];
        let formatIndex = 0;

        // Recursively try each format until a MediaRecorder constructs.
        const tryNextFormat = () => new Promise((resolve, reject) => {
          if (formatIndex >= tryFormats.length) {
            reject(new Error('浏览器不支持OPUS录音'));
            return;
          }
          const mimeType = tryFormats[formatIndex++];
          try {
            const recorder = new MediaRecorder(stream, { mimeType });
            resolve(recorder);
          } catch (err) {
            tryNextFormat().then(resolve).catch(reject);
          }
        });

        tryNextFormat()
            .then((recorder) => {
              mediaRecorder.value = recorder;
              // Collect non-empty chunks as they arrive.
              recorder.ondataavailable = (event) => {
                if (event.data.size > 0) audioChunks.value.push(event.data);
              };
              // recordingPromise resolves with the assembled Blob (or null
              // when nothing was captured) once the recorder stops.
              recordingPromise.value = new Promise((resolve) => {
                recorder.onstop = () => {
                  const blob = audioChunks.value.length > 0
                      ? new Blob(audioChunks.value, { type: recorder.mimeType })
                      : null;
                  resolve(blob);
                };
              });
              // Emit a chunk every 100 ms.
              recorder.start(100);
            })
            .catch((err) => {
              // No usable format: release the mic and surface the error.
              streamRef.value?.getTracks().forEach(t => t.stop());
              resetRecordingState();
              messages.value.push({
                id: generateUniqueId(),
                content: `🎤 录音初始化失败：${err.message}`,
                isUser: true,
                timestamp: Date.now()
              });
            });
      })
      .catch((err) => {
        // Permission denied or no device available.
        resetRecordingState();
        messages.value.push({
          id: generateUniqueId(),
          content: '🎤 无法访问麦克风，请检查权限',
          isUser: true,
          timestamp: Date.now()
        });
      });
};

// Finish the recording: stop the recorder, wait for the final Blob, and
// upload it for speech-to-text. Bug fix: the mic stream is now released in
// `finally` — previously an error thrown before the stop line (e.g. missing
// recorder, Blob timeout) skipped track.stop(), leaking the MediaStream and
// leaving the browser's mic indicator on.
const handleVoiceRelease = async () => {
  if (!isRecording.value || isUploading.value) {
    resetRecordingState();
    return;
  }
  isRecording.value = false;
  isUploading.value = true;
  clearTimeout(timeoutTimerRef.value);

  // Placeholder bubble, updated in-place by updateUploadMsg().
  uploadMsgId.value = generateUniqueId();
  messages.value.push({
    id: uploadMsgId.value,
    content: '🎤 录音上传中...',
    isUser: true,
    timestamp: Date.now()
  });
  scrollToBottom();

  try {
    if (!mediaRecorder.value) throw new Error('录音实例不存在');
    mediaRecorder.value.stop();
    // recordingPromise resolves from recorder.onstop; race a timeout so a
    // recorder that never fires onstop cannot hang the UI forever.
    const audioBlob = await Promise.race([
      recordingPromise.value,
      new Promise((_, reject) => setTimeout(() => reject(new Error('Blob生成超时')), 5000))
    ]);

    if (audioBlob && audioBlob.size > 0) {
      await uploadVoiceToBackend(audioBlob);
    } else {
      updateUploadMsg('🎤 录音数据为空，请重试', false);
    }
  } catch (err) {
    updateUploadMsg(`🎤 录音处理失败：${err.message}`, false);
  } finally {
    // Always release the microphone, even on the error paths above.
    streamRef.value?.getTracks().forEach(track => track.stop());
    resetRecordingState();
    isUploading.value = false;
  }
};

// Abort the current recording without uploading. Bug fix: `isRecording` is
// cleared immediately — it previously stayed true across the await, so a
// second touchend/mouseup arriving mid-teardown could re-enter the release
// or cancel flow on an already-stopped recorder.
const cancelVoiceRecord = async () => {
  if (!isRecording.value || isUploading.value) return;
  isRecording.value = false;
  clearTimeout(timeoutTimerRef.value);

  if (mediaRecorder.value) {
    mediaRecorder.value.stop();
    // Wait for onstop so the recorder flushes before teardown.
    await recordingPromise.value;
  }

  // Release the microphone and tell the user the take was discarded.
  streamRef.value?.getTracks().forEach(track => track.stop());
  messages.value.push({
    id: generateUniqueId(),
    content: '🎤 录音已取消',
    isUser: true,
    timestamp: Date.now()
  });
  scrollToBottom();
  resetRecordingState();
};

// Return every piece of recording-related state to its idle default.
const resetRecordingState = () => {
  isRecording.value = false;
  audioChunks.value = [];
  touchStartY.value = 0;
  mouseStartPos.value = { x: 0, y: 0 };
  uploadMsgId.value = '';
  // Drop all handles to recorder resources in one pass.
  [mediaRecorder, streamRef, timeoutTimerRef, recordingPromise]
      .forEach((handle) => { handle.value = null; });
};

// Rewrite the in-flight "uploading" placeholder with the final status text.
// If the placeholder is gone, failures are appended as a fresh message so the
// user still sees the error; successes without a placeholder are dropped.
const updateUploadMsg = (content, isSuccess) => {
  const target = messages.value.find(msg => msg.id === uploadMsgId.value);
  if (target) {
    target.content = content;
    messages.value = [...messages.value]; // replace the array to force re-render
  } else if (!isSuccess) {
    messages.value.push({
      id: generateUniqueId(),
      content,
      isUser: true,
      timestamp: Date.now()
    });
  }
  scrollToBottom();
};

// Validate the recorded blob (size + duration), then post it to the backend
// speech-to-text endpoint and forward the recognized text over the WebSocket.
const uploadVoiceToBackend = async (audioBlob) => {
  // Hard cap: reject blobs over 1 MiB before touching the network.
  if (audioBlob.size > 1024 * 1024) {
    const sizeKB = (audioBlob.size / 1024).toFixed(1);
    updateUploadMsg(`🎤 录音过大（${sizeKB}KB），请控制在1分钟内`, false);
    return;
  }

  // Probe the duration via an off-DOM <audio> element; errors resolve to 0.
  // NOTE(review): Chromium often reports Infinity for streamed WebM/Opus
  // metadata — Infinity passes the < 0.5 check below, so very short WebM
  // clips may slip through; confirm against the target browsers.
  const audio = new Audio();
  const duration = await new Promise((resolve) => {
    audio.onloadedmetadata = () => resolve(audio.duration);
    audio.onerror = () => resolve(0);
    audio.src = URL.createObjectURL(audioBlob);
  });
  URL.revokeObjectURL(audio.src);

  if (duration < 0.5) {
    updateUploadMsg(`🎤 录音时长过短（${duration.toFixed(1)}秒），请录1秒以上`, false);
    return;
  }

  // sampleRate/audioType mirror the getUserMedia constraints set at record time.
  const formData = new FormData();
  formData.append('file', audioBlob, 'recording.opus');
  formData.append('sampleRate', '16000');
  formData.append('audioType', 'opus');

  try {
    // Backend speech-to-text endpoint: agent/voice/voiceToText.
    const text = await request.post('agent/voice/voiceToText', formData, {
      'Content-Type': 'multipart/form-data',
      timeout: 30000
    });

    updateUploadMsg(`🎤 语音识别结果：${text}`, true);
    sendMessageViaWebSocket(text);
  } catch (error) {
    updateUploadMsg(`🎤 语音识别失败：${error.message || '网络异常'}`, false);
  }
};

// Push recognized speech text to the assistant over the WebSocket.
// Fix: guard on readyState — socket.send() throws while CONNECTING and
// silently drops data once CLOSED.
const sendMessageViaWebSocket = (text) => {
  if (socket.readyState !== WebSocket.OPEN) {
    messages.value.push({
      id: generateUniqueId(),
      content: '连接已断开，消息发送失败，请稍后重试',
      isUser: false,
      timestamp: Date.now()
    });
    scrollToBottom();
    return;
  }
  const conversationId = generateUniqueId();
  socket.send(JSON.stringify({ content: text, role: 'user', conversationId }));
  scrollToBottom();
};

// Synthesize and play speech for a single AI message; failures surface as a
// toast instead of throwing to the caller.
const playMessageVoice = async (text) => {
  try {
    const blob = await convertTextToVoice(text);
    if (!blob) return;
    await playAudioBlob(blob);
  } catch (error) {
    console.error('[消息语音播放] 失败:', error);
    uni.showToast({ title: '语音播放失败', icon: 'none' });
  }
};

// Flip the global auto-TTS switch shown in the header.
const toggleGlobalVoice = () => {
  const next = !isVoiceGlobalOn.value;
  isVoiceGlobalOn.value = next;
};

// Call the text-to-speech endpoint and wrap the raw audio bytes in a Blob.
// Returns null (instead of throwing) when the request fails.
const convertTextToVoice = async (text) => {
  const options = {
    responseType: 'arraybuffer',
    headers: { 'Content-Type': 'application/json' }
  };
  try {
    const bytes = await request.post('agent/voice/textToVoice', { text }, options);
    return new Blob([bytes], { type: 'audio/mpeg' });
  } catch (error) {
    console.error('[语音播报] 接口调用失败:', error);
    return null;
  }
};

// Play an audio Blob to completion. Resolves (never rejects) whether playback
// ends, errors, or is blocked by the autoplay policy. Bug fix: the temporary
// object URL is now revoked on ALL exit paths — previously a rejected play()
// (autoplay block) leaked the URL for the page's lifetime.
const playAudioBlob = (blob) => {
  return new Promise((resolve) => {
    const audioUrl = URL.createObjectURL(blob);
    const audio = new Audio(audioUrl);
    let settled = false;
    // Single exit point: revoke the URL exactly once, then resolve.
    const finish = () => {
      if (settled) return;
      settled = true;
      URL.revokeObjectURL(audioUrl);
      resolve();
    };
    audio.onended = finish;
    audio.onerror = finish;
    audio.play().catch((err) => {
      console.error('[语音播报] 播放失败:', err);
      finish();
    });
  });
};

/* ========== Component lifecycle ========== */
onMounted(() => {
  // Jump to the latest message when the page first renders.
  scrollToBottom();
});

onUnmounted(() => {
  // Release the microphone if a recording was still active when leaving.
  if (streamRef.value) {
    streamRef.value.getTracks().forEach(track => track.stop());
  }
  socket.close();
  cleanupGlobalMouseEvents();
});

// Navigate one step back to the home page (currently unreferenced in the
// template; kept for future use).
const goBackToHome = () => {
  uni.navigateBack({ delta: 1 });
};
</script>


<style scoped>/* Global box-model reset so size math stays exact */
* {
  box-sizing: border-box;
  margin: 0;
  padding: 0;
}

.chat-page {
  width: 100%;
  height: 100vh;
  display: flex;
  flex-direction: column;
  background-color: #f5f7fa;
  overflow: hidden; /* the page itself never scrolls */
}

/* Top bar: fixed height, hosts the navigation */
.page-header {
  display: flex;
  justify-content: space-between;
  align-items: center;
  height: 44px;
  padding: 0 12px;
  background-color: #fff;
  border-bottom: 1px solid #eee;
  z-index: 10; /* keep the header above the content */
}

.header-avatar {
  display: flex;
  align-items: center;
}

.user-avatar-icon {
  width: 32px;
  height: 32px;
  border-radius: 50%;
}

.header-title {
  flex: 1;
  text-align: center;
  font-size: 16px;
  font-weight: 500;
  color: #333;
}

.header-right {
  display: flex;
  align-items: center;
}

.global-voice-btn {
  background: transparent;
  border: none;
  padding: 4px;
  cursor: pointer;
}

.global-voice-icon {
  width: 20px;
  height: 20px;
}

/* Chat container: fills the remaining height, positioning context for the input bar */
.chat-container {
  flex: 1; /* take all space below the header */
  display: flex;
  flex-direction: column;
  position: relative;
  overflow: hidden; /* keep children from overflowing the container */
}

/* Message list: its own scroll container */
.chat-messages {
  flex: 1; /* chat-container height minus the input bar */
  overflow-y: auto; /* scroll internally when content overflows */
  padding: 10px;
  display: flex;
  flex-direction: column;
  gap: 10px; /* vertical spacing between messages */
}

.message-item {
  display: flex;
  max-width: 70%;
  align-items: flex-start;
}

.user-message {
  margin-left: auto;
  flex-direction: row-reverse;
}

.ai-message {
  margin-right: auto;
  flex-direction: row;
}

.avatar {
  width: 36px;
  height: 36px;
  border-radius: 50%;
  overflow: hidden;
  margin: 0 6px;
  align-self: flex-start;
  flex-shrink: 0; /* keep the avatar from being squeezed */
}

.avatar image {
  width: 100%;
  height: 100%;
  object-fit: cover;
}

.message-bubble {
  display: flex;
  flex-direction: column;
  background: #fff;
  border: 1px solid #eee;
  border-radius: 12px;
  padding: 6px 10px;
  max-width: calc(100% - 48px); /* leave room for avatar + gaps */
  word-wrap: break-word; /* wrap long runs of text */
}

.user-message .message-bubble {
  background-color: #d3eafd;
  border-color: #b3d8f4;
}

.ai-message .message-bubble {
  background-color: #f1f1f1;
  border-color: #e5e5e5;
}

.message-content {
  font-size: 14px;
  line-height: 1.5;
  color: #333;
}

.message-time {
  font-size: 11px;
  color: #999;
  text-align: right;
  margin-top: 3px;
}

.message-voice-btn {
  background: transparent;
  border: none;
  padding: 6px;
  display: flex;
  align-items: flex-start;
  margin-top: 3px;
  cursor: pointer;
}

.message-voice-icon {
  width: 18px;
  height: 18px;
}

/* Input area: pinned to the bottom, high visibility */
.chat-input {
  position: absolute;
  bottom: 0;    /* flush with the chat container's bottom */
  left: 0;      /* flush left */
  right: 0;     /* flush right */
  display: flex;
  align-items: center;
  gap: 6px;     /* spacing between controls */
  padding: 8px 12px; /* padding enlarges the tappable area */
  background: #fff;  /* white background separates it from the messages */
  border-top: 1px solid #e5e5e5; /* top border reinforces the boundary */
  height: 56px; /* fixed height keeps controls uncrowded */
  z-index: 10;  /* sit above the message list */
}

.input-text {
  flex: 1;
  height: 36px; /* align with the buttons */
  padding: 0 12px;
  border: 1px solid #ddd; /* grey border makes the field visible */
  border-radius: 18px;    /* rounded corners */
  resize: none;
  font-size: 14px;
  color: #333;  /* dark text for readability */
  line-height: 36px;      /* vertically center the text */
}

.send-btn, .upload-btn {
  height: 36px; /* align with the input field */
  padding: 0 14px;
  border: none;
  border-radius: 18px;
  color: #fff;
  cursor: pointer;
  font-size: 14px;
  display: flex;
  align-items: center;
  justify-content: center;
}

.send-btn {
  background: #409eff;
}

.send-btn:hover {
  background: #66b1ff;
}

.upload-btn {
  background: #67c23a;
}

.upload-btn:hover {
  background: #85ce61;
}

.voice-container {
  position: relative;
}

.voice-btn {
  width: 36px;
  height: 36px; /* align with the input field */
  border: none;
  border-radius: 50%;
  background: #409eff;
  color: #fff;
  cursor: pointer;
  display: flex;
  align-items: center;
  justify-content: center;
}

.voice-btn:hover {
  background: #66b1ff;
}

.voice-container.recording .voice-btn {
  background: #ff4d4f;
}

.voice-tip {
  position: absolute;
  bottom: 120%;
  left: 50%;
  transform: translateX(-50%);
  background: rgba(0,0,0,0.9);
  color: #fff;
  padding: 6px 10px;
  border-radius: 4px;
  font-size: 11px;
  white-space: nowrap;
  z-index: 10;
}

.audio-wave {
  display: flex;
  align-items: flex-end;
  height: 18px;
  gap: 2px;
  margin-bottom: 4px;
}

.audio-wave span {
  width: 2px;
  background-color: #409eff;
  border-radius: 2px;
  animation: wave 1s ease-in-out infinite;
}

.voice-container.recording .audio-wave span {
  background-color: #ff4d4f;
}

.audio-wave span:nth-child(2) { animation-delay: 0.1s; }
.audio-wave span:nth-child(3) { animation-delay: 0.2s; }
.audio-wave span:nth-child(4) { animation-delay: 0.3s; }
.audio-wave span:nth-child(5) { animation-delay: 0.4s; }

@keyframes wave {
  0%, 100% { height: 4px; }
  50% { height: 16px; }
}

.mic-icon {
  width: 18px;
  height: 18px;
}
</style>