import { View, Text, Input, Button, ScrollView, Image } from '@tarojs/components'
import Taro, { useLoad, getStorageSync, setStorageSync, useRouter, useDidShow } from '@tarojs/taro'
import { useState, useRef, useEffect, useMemo, useCallback } from 'react';
import { io as createIO, Socket } from 'socket.io-client'
import { Photograph, Phone } from '@taroify/icons'
import { debounce } from '../../utils/debounce'
import { formatChatTime } from '../../utils/dateFormat'
import { voicePlayerManager, formatTime, formatDuration } from '../../utils/voicePlayer'
import { voiceDebugger } from '../../utils/voiceDebugger'
import { requestVideoCallPermissions, getPermissionDebugInfo } from '../../utils/permissionHelper'

import './chat.scss'



export default function Index() {
  useLoad(() => {
    console.log('Page loaded.')
  })

  // On every page show: restart the auto-refresh polling loop and fetch the
  // latest messages right away. startAutoRefresh / refreshMessages are
  // defined later in this component (outside this excerpt) — confirm there.
  useDidShow(() => {
    if (chatId) {
      console.log('🔄 页面显示，重新拉取消息')
      // Restart the periodic auto-refresh
      startAutoRefresh()
      // And pull fresh messages immediately
      refreshMessages()
    }
  })

  // ---- UI / chat state ----
  const [showEmojiPanel, setShowEmojiPanel] = useState(false);
  const [inputValue, setInputValue] = useState('');
  const [recentEmojis, setRecentEmojis] = useState<string[]>([]);
  const [showExtraFunction, setShowExtraFunction] = useState(false);
  const [isVoiceMode, setIsVoiceMode] = useState(false);
  const inputRef = useRef<any>(null);
  const panelRef = useRef<any>(null);
  const router = useRouter()
  const [chatId, setChatId] = useState<string | undefined>(undefined)
  const [messages, setMessages] = useState<any[]>([])
  const [userMap, setUserMap] = useState<Record<string, any>>({})
  const [peerId, setPeerId] = useState<string | undefined>(undefined)
  const [peerUser, setPeerUser] = useState<any>(null) // the peer's user profile
  const [currentUser, setCurrentUser] = useState<any>(null) // the logged-in user's profile
  const socketRef = useRef<Socket | null>(null)
  
  // ---- Load-more (scroll up for older history) state ----
  const [hasMoreMessages, setHasMoreMessages] = useState(true)
  const [isLoadingMore, setIsLoadingMore] = useState(false)
  const [lastMessageId, setLastMessageId] = useState<string | null>(null)
  
  // ---- Pull-to-refresh state ----
  const [isRefreshing, setIsRefreshing] = useState(false)
  const [refresherTriggered, setRefresherTriggered] = useState(false)
  
  // ---- Periodic auto-refresh state ----
  const [isAutoRefreshing, setIsAutoRefreshing] = useState(false)
  const [lastRefreshTime, setLastRefreshTime] = useState<number>(0)
  const autoRefreshTimerRef = useRef<any>(null)
  const refreshIntervalRef = useRef<any>(null)
  

  // Append an incoming message to the list, skipping duplicates.
  // Dedup key: the server-assigned _id when present, otherwise a composite
  // sender/type/content/timestamp key.
  const pushUniqueMessage = useCallback((incoming: any) => {
    if (!incoming) {
      console.warn('收到空消息，跳过')
      return
    }

    console.log('收到新消息:', {
      id: incoming._id,
      senderId: incoming.senderId,
      content: incoming.content,
      type: incoming.type,
      createdAt: incoming.createdAt
    })

    // Fallback dedup key used when a message carries no _id.
    const compositeKeyOf = (msg: any) =>
      `${msg.senderId}|${msg.type || 'text'}|${msg.content || msg.imageUrl || msg.voiceUrl || ''}|${msg.createdAt}`

    setMessages((prev) => {
      const id = String(incoming._id || '')

      if (id) {
        // Primary path: dedupe on the id.
        if (prev.some((x) => String(x._id || '') === id)) {
          console.log('消息已存在，跳过:', id)
          return prev
        }
        console.log('添加新消息:', id)
        return [...prev, incoming]
      }

      // Fallback path: dedupe on the composite key.
      const key = compositeKeyOf(incoming)
      if (prev.some((x) => compositeKeyOf(x) === key)) {
        console.log('消息已存在（组合键），跳过:', key)
        return prev
      }

      console.log('添加新消息（组合键）:', key)
      return [...prev, incoming]
    })
  }, [])

  // ---- Video call state ----
  const [isVideoCallActive, setIsVideoCallActive] = useState(false)
  const [videoCallId, setVideoCallId] = useState<string | null>(null)
  const [isVideoCallIncoming, setIsVideoCallIncoming] = useState(false)
  const [incomingCallInfo, setIncomingCallInfo] = useState<any>(null)
  const localVideoRef = useRef<HTMLVideoElement | null>(null)
  const remoteVideoRef = useRef<HTMLVideoElement | null>(null)
  const peerConnectionRef = useRef<RTCPeerConnection | null>(null)
  const localStreamRef = useRef<MediaStream | null>(null)

  // ---- Voice call (audio-only) state ----
  // NOTE(review): voicePeerConnectionRef is also repurposed to hold the TRTC
  // client object when the TRTC path succeeds (see initVoiceTRTC).
  const [isVoiceCallActive, setIsVoiceCallActive] = useState(false)
  const [voiceCallId, setVoiceCallId] = useState<string | null>(null)
  const [isVoiceCallIncoming, setIsVoiceCallIncoming] = useState(false)
  const [incomingVoiceInfo, setIncomingVoiceInfo] = useState<any>(null)
  const voicePeerConnectionRef = useRef<RTCPeerConnection | null>(null)
  const voiceLocalStreamRef = useRef<MediaStream | null>(null)

  // ---- Voice message recording / playback state ----
  const [isRecording, setIsRecording] = useState(false);
  const [recordingDuration, setRecordingDuration] = useState(0);
  const [playingVoiceId, setPlayingVoiceId] = useState<string | null>(null);
  const [voicePlayProgress, setVoicePlayProgress] = useState(0);
  const [voicePlayDuration, setVoicePlayDuration] = useState(0);
  const [voicePlayCurrentTime, setVoicePlayCurrentTime] = useState(0);
  const [playbackError, setPlaybackError] = useState<string | null>(null);
  const recordingTimerRef = useRef<any>(null);
  const recordingStartTimeRef = useRef<number>(0);
  const recorderManagerRef = useRef<any>(null);
  const playProgressTimerRef = useRef<any>(null);
  // Emoji palette shown in the picker, grouped by category (keys are the
  // category tab labels rendered to the user).
  // NOTE(review): rebuilt on every render; harmless, but could be hoisted to
  // module scope or wrapped in useMemo since the contents are constant.
  const emojiData = {
    表情: ['😀', '😃', '😄', '😁', '😆', '😅', '🤣', '😂', '🙂', '🙃', '😊', '😍', '😘', '😜', '🤔', '🙌', '😭', '😡', '😱', '😴', '😇', '😏', '😬', '🤗', '🤩', '🤯', '🤮', '🤒', '🤧', '🥳'],
    符号: ['❤️', '⭐', '🔥', '🎉', '✨', '✔️', '❌', '❓', '❗', '💯', '💡', '✅', '⚠️', '⚡', '☀️', '🌙', '⭐️', '☁️', '☔️']
  };

  // Flattened list of every emoji across all categories.
  const allEmojis: string[] = Object.values(emojiData).flat();

  // Ask for camera + microphone access through the shared permission helper.
  // Resolves to false (never throws) when the request fails.
  const requestMediaPermissions = async () => {
    try {
      const granted = await requestVideoCallPermissions()
      return granted
    } catch (error) {
      console.error('权限请求失败:', error)
      return false
    }
  }

  // Acquire a local audio+video MediaStream.
  // H5 uses getUserMedia when available; mini-program environments (or any
  // failure) fall back to an inert mock stream so callers never see a
  // rejected promise — the call UI can still open.
  const getMediaStream = async () => {
    // Inert MediaStream stand-in: empty track lists, no-op methods.
    // Previously this literal was duplicated in both the fallback and the
    // catch branch; it is now built in one place.
    const makeMockStream = () => ({
      getVideoTracks: () => [],
      getAudioTracks: () => [],
      getTracks: () => [],
      active: false,
      id: 'mock-stream',
      onaddtrack: null,
      onremovetrack: null,
      addTrack: () => {},
      removeTrack: () => {},
      getTrackById: () => null,
      clone: () => ({} as any),
      addEventListener: () => {},
      removeEventListener: () => {},
      dispatchEvent: () => false
    } as any)

    try {
      // H5: use real WebRTC when the browser exposes it.
      if (process.env.TARO_ENV === 'h5') {
        if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
          return await navigator.mediaDevices.getUserMedia({
            video: true,
            audio: true
          })
        }
      }

      // Mini-program environment, or WebRTC unavailable.
      console.warn('WebRTC不支持，使用模拟媒体流')
      return makeMockStream()
    } catch (error) {
      // Deliberately swallow: return an empty stream instead of propagating.
      console.error('获取媒体流失败:', error)
      return makeMockStream()
    }
  }

  // Collect the current permission state (plus helper recommendations) and
  // present it in a one-button modal for on-device debugging.
  const debugPermissions = async () => {
    try {
      const debugInfo = await getPermissionDebugInfo()
      console.log('权限调试信息:', debugInfo)

      let message = '权限状态:\n'
      for (const [scope, granted] of Object.entries(debugInfo.allPermissions)) {
        message += `${scope}: ${granted ? '已授权' : '未授权'}\n`
      }

      const recs = debugInfo.recommendations
      if (recs.length > 0) {
        message += '\n建议:\n' + recs.join('\n')
      }

      Taro.showModal({
        title: '权限调试信息',
        content: message,
        showCancel: false
      })
    } catch (error) {
      console.error('获取权限调试信息失败:', error)
    }
  }

  // True when running inside the WeChat devtools simulator (device platform
  // reported as 'devtools'); false on real devices or outside weapp entirely.
  const isDevTool = () => {
    try {
      // @ts-ignore -- `wx` only exists in the mini-program runtime
      const info = typeof wx !== 'undefined' ? wx.getDeviceInfo() : null
      return info?.platform === 'devtools'
    } catch {
      return false
    }
  }


  // Pop a single-button modal warning that `feature` may not work inside the
  // devtools simulator. No-op on real devices.
  const showDevToolWarning = (feature: string) => {
    if (!isDevTool()) return
    Taro.showModal({
      title: '开发者工具限制',
      content: `在开发者工具中，${feature}功能可能受限或无法正常工作。建议在真机上测试完整功能。`,
      showCancel: false,
      confirmText: '知道了'
    })
  }

  // Request microphone permission for a voice call. Resolution order:
  // weapp (devtools → real device) → H5 modern API → H5 legacy API →
  // devtools fallback. Resolves to a boolean.
  // NOTE(review): in devtools this deliberately returns true even on failure
  // so development can continue; enforcement only happens on real devices.
  // NOTE(review): the legacy-getUserMedia branch *returns* a promise that can
  // reject later — that rejection is NOT routed through the catch below.
  const requestMicPermission = async () => {
    try {
      console.log('🎤 开始请求麦克风权限...')
      console.log('📱 当前环境:', process.env.TARO_ENV)
      console.log('🔧 是否在开发者工具:', isDevTool())

      const webRTCSupport = checkWebRTCSupport()
      console.log('🎤 麦克风权限请求时的WebRTC支持情况:', webRTCSupport)

      if (process.env.TARO_ENV === 'weapp') {
        const app = Taro.getApp()
        
        // Log the current authorization map before asking.
        const setting = await Taro.getSetting()
        console.log('📋 当前权限状态:', setting.authSetting)
        
        // Devtools: recording is unreliable there, warn the user first.
        if (isDevTool()) {
          console.warn('⚠️ 当前在开发者工具中，语音通话功能可能受限')
          Taro.showToast({
            title: '开发者工具中语音通话功能受限，请在真机上测试',
            icon: 'none',
            duration: 3000
          })
          // Still attempt the request in devtools, with lowered expectations.
          try {
            // @ts-ignore
            const granted = !!(await app?.globalData?.requestRecordPermission?.())
            console.log('🎤 开发者工具权限请求结果:', granted)
            return granted
          } catch (error) {
            console.warn('⚠️ 开发者工具权限请求失败，继续执行:', error)
            return true // allow the flow to continue in devtools
          }
        }

        // Real device: request via the app-level helper.
        // NOTE(review): relies on app.globalData.requestRecordPermission being
        // installed in the app entry — confirm it exists there.
        // @ts-ignore
        const granted = !!(await app?.globalData?.requestRecordPermission?.())
        console.log('🎤 真机权限请求结果:', granted)
        return granted
      }
      
      // H5 (or any non-weapp) environment.
      if (webRTCSupport.hasMediaDevices) {
        console.log('🌐 使用现代WebRTC API请求麦克风权限...')
        await navigator.mediaDevices.getUserMedia({ audio: true, video: false })
        console.log('✅ 现代WebRTC API麦克风权限获取成功')
        return true
      } else if (webRTCSupport.hasLegacyGetUserMedia) {
        console.log('🌐 使用传统getUserMedia API请求麦克风权限...')
        return new Promise((resolve, reject) => {
          (navigator as any).getUserMedia(
            { audio: true, video: false },
            (stream: MediaStream) => {
              console.log('✅ 传统getUserMedia API麦克风权限获取成功')
              stream.getTracks().forEach(track => track.stop()) // stop at once — this was only a permission probe
              resolve(true)
            },
            (error: any) => {
              console.error('❌ 传统getUserMedia API麦克风权限获取失败:', error)
              reject(error)
            }
          )
        })
      }
      
      // No WebRTC at all: pretend success in devtools…
      if (isDevTool()) {
        console.warn('⚠️ 开发者工具中WebRTC不支持，模拟麦克风权限已授予')
        return true
      }
      
      // …but fail on real devices.
      console.error('❌ 真机上WebRTC不支持，无法获取麦克风权限')
      return false
      
    } catch (error) {
      console.error('❌ 麦克风权限请求失败:', error)
      
      // Devtools: allow the flow to continue even after a failed request.
      if (isDevTool()) {
        console.warn('⚠️ 开发者工具中权限请求失败，但允许继续执行')
        return true
      }
      
      return false
    }
  }

  // Probe which WebRTC entry points exist in the current runtime.
  // Fixed: the original dereferenced the `navigator` global unguarded, which
  // throws a ReferenceError in runtimes that do not define it (e.g. some
  // mini-program environments); everything is now behind `typeof` checks.
  const checkWebRTCSupport = () => {
    // `navigator` may be entirely absent outside browsers/H5.
    const nav: any = typeof navigator !== 'undefined' ? navigator : undefined
    const support = {
      hasMediaDevices: !!(nav && nav.mediaDevices && nav.mediaDevices.getUserMedia),
      hasRTCPeerConnection: typeof RTCPeerConnection !== 'undefined',
      hasLegacyGetUserMedia: !!(nav && nav.getUserMedia)
    }

    console.log('🔍 检查WebRTC支持情况...')
    console.log('navigator.mediaDevices:', !!(nav && nav.mediaDevices))
    console.log('navigator.mediaDevices.getUserMedia:', support.hasMediaDevices)
    console.log('RTCPeerConnection:', support.hasRTCPeerConnection)
    console.log('navigator.getUserMedia (legacy):', support.hasLegacyGetUserMedia)

    return support
  }

  // Acquire an audio-only MediaStream for voice calls (H5 path; mini-program
  // audio capture is handled by components, not here). Resolution order:
  // modern mediaDevices API → legacy getUserMedia → devtools mock.
  // On real devices without WebRTC this throws so the caller can surface a
  // proper error; in devtools it degrades to an inert mock stream.
  const getAudioOnlyStream = async () => {
    // Inert audio-stream stand-in used only inside devtools: empty track
    // lists, no-op methods. Previously this literal was duplicated in two
    // branches; it is now built in one place.
    const makeMockAudioStream = () => ({
      getAudioTracks: () => [],
      getTracks: () => [],
      active: false,
      id: 'mock-audio-stream',
      onaddtrack: null,
      onremovetrack: null,
      addTrack: () => {},
      removeTrack: () => {},
      getTrackById: () => null,
      clone: () => ({} as any),
      addEventListener: () => {},
      removeEventListener: () => {},
      dispatchEvent: () => false
    } as any)

    try {
      const webRTCSupport = checkWebRTCSupport()
      console.log('🎤 WebRTC支持情况:', webRTCSupport)

      // Preferred: modern promise-based API.
      if (webRTCSupport.hasMediaDevices) {
        console.log('🎤 使用现代WebRTC API获取真实音频流...')
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false })
        console.log('✅ 成功获取真实音频流:', {
          audioTracks: stream.getAudioTracks().length,
          stream: stream
        })
        return stream
      }

      // Fallback: callback-based legacy API.
      if (webRTCSupport.hasLegacyGetUserMedia) {
        console.log('🎤 使用传统getUserMedia API获取真实音频流...')
        return new Promise((resolve, reject) => {
          (navigator as any).getUserMedia(
            { audio: true, video: false },
            (stream: MediaStream) => {
              console.log('✅ 成功获取真实音频流 (legacy):', {
                audioTracks: stream.getAudioTracks().length,
                stream: stream
              })
              resolve(stream)
            },
            (error: any) => {
              console.error('❌ 传统getUserMedia失败:', error)
              reject(error)
            }
          )
        })
      }

      // Neither API exists: mock in devtools, hard error on real devices.
      if (isDevTool()) {
        console.warn('⚠️ 开发者工具中WebRTC不支持，使用模拟音频流')
        return makeMockAudioStream()
      }

      throw new Error('WebRTC不支持，无法获取音频流')

    } catch (error) {
      console.error('获取音频流失败:', error)

      // Devtools: degrade to a mock stream instead of failing the call flow.
      if (isDevTool()) {
        console.warn('⚠️ 开发者工具中音频流获取失败，使用模拟流')
        return makeMockAudioStream()
      }

      // Real device: propagate so the caller can show a permission error.
      throw error
    }
  }

  // Initiate an outgoing video call to `peerId`:
  // permissions → local media → socket invitation → WebRTC setup.
  const startVideoCall = async () => {
    if (!peerId) {
      Taro.showToast({ title: '无法发起视频通话：缺少对方信息', icon: 'none' })
      return
    }

    try {
      // Camera/mic permission is required up front.
      const hasPermissions = await requestMediaPermissions()
      if (!hasPermissions) {
        // Show the permission debug modal so the user sees what is missing.
        await debugPermissions()
        return
      }

      // Grab the local stream (an inert mock when WebRTC is unavailable).
      const stream = await getMediaStream()
      localStreamRef.current = stream

      // Generate a unique call/room id.
      const callId = `video_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
      setVideoCallId(callId)
      // NOTE(review): the setVideoCallId above is not visible to initWebRTC
      // below — that closure still reads this render's `videoCallId` (null on
      // the first call), so the `video:offer` emit inside initWebRTC is
      // skipped. Confirm the offer is re-sent elsewhere, or pass `callId`
      // into initWebRTC explicitly.

      // Send the invitation; the backend forwards it to the target user.
      if (socketRef.current) {
        console.log('Taro端发起视频通话:', {
          to: peerId,
          callId: callId,
          from: currentUser?._id,
          fromName: currentUser?.nickname || currentUser?.username || '用户'
        })

        socketRef.current.emit('video:forward', {
          to: peerId,
          callId: callId,
          from: currentUser?._id,
          fromName: currentUser?.nickname || currentUser?.username || '用户'
        })
      }

      // Set up the RTCPeerConnection; true = we are the caller/offerer.
      await initWebRTC(true)

      setIsVideoCallActive(true)
      Taro.showToast({ title: '正在发起视频通话...', icon: 'none' })

    } catch (error: any) {
      console.error('启动视频通话失败:', error)
      Taro.showToast({ title: '无法访问摄像头和麦克风，请检查权限设置', icon: 'none' })
      // Surface permission details to help diagnose the failure.
      await debugPermissions()
    }
  }

  // Initiate an outgoing voice (audio-only) call to `peerId`.
  // Tries the TRTC SDK first and falls back to raw/mock WebRTC on failure.
  const startVoiceCall = async () => {
    if (!peerId) {
      Taro.showToast({ title: '无法发起语音通话：缺少对方信息', icon: 'none' })
      return
    }

    try {
      console.log('🎤 开始发起语音通话...')
      console.log('📱 当前环境:', process.env.TARO_ENV)
      console.log('🔧 是否在开发者工具:', isDevTool())

      // Microphone permission is mandatory for a voice call.
      const hasMic = await requestMicPermission()
      if (!hasMic) {
        Taro.showToast({ title: '需要麦克风权限', icon: 'none' })
        return
      }

      // Explicit warning when running inside devtools (voice is limited).
      if (isDevTool()) {
        Taro.showToast({
          title: '开发者工具中语音通话功能受限，建议在真机上测试',
          icon: 'none',
          duration: 3000
        })
      }

      // Prefer the TRTC path; fall back to native/mock WebRTC on failure.
      // NOTE(review): initVoiceTRTC requires a non-null `voiceCallId`, but the
      // call id is only generated *below* this block (and React state updates
      // are not visible within this render anyway), so this TRTC attempt
      // always throws on the first call and we fall through to WebRTC. The
      // fallback's `voice:offer` emit has the same stale-state problem.
      // Consider passing the freshly generated callId into the init functions.
      try {
        await initVoiceTRTC(true)
      } catch (e) {
        console.warn('TRTC 初始化失败，回退到原生/模拟 WebRTC:', e)
        // Acquire an audio-only stream (fallback path only).
        const stream = await getAudioOnlyStream()
        voiceLocalStreamRef.current = stream
        await initVoiceWebRTC(true)
      }
      
      // Generate a unique call id.
      const callId = `voice_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
      setVoiceCallId(callId)
      
      console.log('📞 语音通话参数:', {
        callId,
        peerId,
        currentUserId: currentUser?._id,
        fromName: currentUser?.nickname || currentUser?.username || '用户'
      })

      // Send the invitation; the backend forwards it to the callee.
      if (socketRef.current) {
        console.log('📤 发送语音通话邀请:', {
          to: peerId,
          callId,
          from: currentUser?._id,
          fromName: currentUser?.nickname || currentUser?.username || '用户'
        })
        socketRef.current.emit('voice:forward', {
          to: peerId,
          callId,
          from: currentUser?._id,
          fromName: currentUser?.nickname || currentUser?.username || '用户'
        })
        console.log('📤 语音通话邀请已发送')
      }

      // Switch the UI into the in-call state.
      setIsVoiceCallActive(true)
      
      Taro.showToast({ 
        title: isDevTool() ? '开发者工具中语音通话功能受限' : '正在发起语音通话...', 
        icon: 'none' 
      })
      
    } catch (error) {
      console.error('❌ 启动语音通话失败:', error)
      
      // Tailor the error message to the environment.
      if (isDevTool()) {
        Taro.showToast({ 
          title: '开发者工具中语音通话功能受限，请在真机上测试', 
          icon: 'none',
          duration: 3000
        })
      } else {
        Taro.showToast({ 
          title: '无法访问麦克风，请检查权限设置', 
          icon: 'none' 
        })
      }
    }
  }

  // ---- Video call handlers ----

  // Accept an incoming video call described by `callInfo` ({ callId, from, … }).
  const answerVideoCall = async (callInfo: any) => {
    try {
      setVideoCallId(callInfo.callId)
      setIsVideoCallIncoming(false)
      // NOTE(review): initWebRTC below reads `videoCallId` from this render's
      // closure, which does not yet reflect the setVideoCallId above — verify
      // that the ICE emits inside it do not rely on the fresh call id.

      // Camera/mic permission is required up front.
      const hasPermissions = await requestMediaPermissions()
      if (!hasPermissions) {
        Taro.showToast({ title: '需要摄像头和麦克风权限才能进行视频通话', icon: 'none' })
        return
      }

      // Grab the local stream (an inert mock when WebRTC is unavailable).
      const stream = await getMediaStream()
      localStreamRef.current = stream

      // Set up the RTCPeerConnection; false = we are the callee/answerer.
      await initWebRTC(false)

      setIsVideoCallActive(true)
      Taro.showToast({ title: '正在连接视频通话...', icon: 'none' })

    } catch (error: any) {
      console.error('接听视频通话失败:', error)
      Taro.showToast({ title: '无法访问摄像头和麦克风，请检查权限设置', icon: 'none' })
    }
  }

  const rejectVideoCall = (callInfo: any) => {
    if (socketRef.current) {
      socketRef.current.emit('video:reject', {
        callId: callInfo.callId,
        to: callInfo.from
      })
    }
    setIsVideoCallIncoming(false)
    setIncomingCallInfo(null)
  }

  // Create and wire up the RTCPeerConnection for a video call.
  // isInitiator: true = caller (creates and sends the offer), false = callee.
  // Falls back to an inert mock connection when RTCPeerConnection is absent
  // (mini-program environments) so callers never crash.
  const initWebRTC = async (isInitiator: boolean) => {
    try {
      // Feature-detect WebRTC; mini-programs do not expose RTCPeerConnection.
      if (typeof RTCPeerConnection === 'undefined') {
        console.warn('WebRTC不支持，使用模拟模式')
        // Mock connection: every method resolves or no-ops.
        peerConnectionRef.current = {
          addTrack: () => {},
          createOffer: () => Promise.resolve({}),
          createAnswer: () => Promise.resolve({}),
          setLocalDescription: () => Promise.resolve(),
          setRemoteDescription: () => Promise.resolve(),
          addIceCandidate: () => Promise.resolve(),
          close: () => {},
          onicecandidate: null,
          onconnectionstatechange: null,
          ontrack: null
        } as any
        return
      }

      // Real connection using public Google STUN servers (no TURN configured,
      // so symmetric-NAT peers may fail to connect).
      const config = {
        iceServers: [
          { urls: 'stun:stun.l.google.com:19302' },
          { urls: 'stun:stun1.l.google.com:19302' }
        ]
      }

      peerConnectionRef.current = new RTCPeerConnection(config)

      // Attach every local track to the connection.
      if (localStreamRef.current) {
        localStreamRef.current.getTracks().forEach(track => {
          peerConnectionRef.current!.addTrack(track, localStreamRef.current!)
        })

        // Mirror the local stream into the local video element.
        if (localVideoRef.current) {
          localVideoRef.current.srcObject = localStreamRef.current
        }
      }

      // Render the remote stream when tracks arrive.
      peerConnectionRef.current.ontrack = (event) => {
        console.log('收到远程流:', event)
        if (remoteVideoRef.current) {
          remoteVideoRef.current.srcObject = event.streams[0]
        }
      }

      // Relay ICE candidates to the peer via the signalling socket.
      // NOTE(review): `videoCallId` is this render's state — when this runs
      // right after setVideoCallId (see startVideoCall) it still holds the
      // old value (null initially) and the emit is skipped. Confirm intent.
      peerConnectionRef.current.onicecandidate = (event) => {
        if (event.candidate && socketRef.current && videoCallId) {
          console.log('发送ICE候选:', event.candidate)
          socketRef.current.emit('video:ice-candidate', {
            callId: videoCallId,
            candidate: event.candidate,
            to: peerId
          })
        }
      }

      // Toast on connect/disconnect transitions.
      peerConnectionRef.current.onconnectionstatechange = () => {
        console.log('连接状态变化:', peerConnectionRef.current?.connectionState)
        if (peerConnectionRef.current?.connectionState === 'connected') {
          Taro.showToast({ title: '视频通话已连接', icon: 'success' })
        } else if (peerConnectionRef.current?.connectionState === 'disconnected') {
          Taro.showToast({ title: '视频通话已断开', icon: 'none' })
        }
      }

      // Caller side: create the SDP offer and send it to the peer
      // (same stale `videoCallId` caveat as above).
      if (isInitiator) {
        const offer = await peerConnectionRef.current.createOffer()
        await peerConnectionRef.current.setLocalDescription(offer)

        if (socketRef.current && videoCallId) {
          console.log('发送offer:', offer)
          socketRef.current.emit('video:offer', {
            callId: videoCallId,
            offer: offer,
            to: peerId
          })
        }
      }

    } catch (error) {
      console.error('WebRTC初始化失败:', error)
      Taro.showToast({ title: '视频通话初始化失败', icon: 'none' })
    }
  }

  // Callee side: apply the remote SDP offer, create an answer, and relay it
  // back through the signalling socket.
  const handleVideoOffer = useCallback(async (remoteOffer: RTCSessionDescriptionInit) => {
    try {
      console.log('处理视频通话offer:', remoteOffer)
      await peerConnectionRef.current!.setRemoteDescription(remoteOffer)
      const localAnswer = await peerConnectionRef.current!.createAnswer()
      await peerConnectionRef.current!.setLocalDescription(localAnswer)

      if (socketRef.current && videoCallId) {
        console.log('发送answer:', localAnswer)
        const payload = {
          callId: videoCallId,
          answer: localAnswer,
          to: peerId
        }
        socketRef.current.emit('video:answer', payload)
      }
    } catch (error) {
      console.error('处理视频通话offer失败:', error)
      Taro.showToast({ title: '处理视频通话失败', icon: 'none' })
    }
  }, [videoCallId, peerId])

  const endVideoCall = useCallback(() => {
    try {
      // 停止本地流
      if (localStreamRef.current) {
        localStreamRef.current.getTracks().forEach(track => track.stop())
        localStreamRef.current = null
      }

      // 关闭WebRTC连接
      if (peerConnectionRef.current) {
        peerConnectionRef.current.close()
        peerConnectionRef.current = null
      }

      // 发送结束通话信号
      if (socketRef.current && videoCallId) {
        socketRef.current.emit('video:end', {
          callId: videoCallId,
          to: peerId
        })
      }

      setIsVideoCallActive(false)
      setVideoCallId(null)

      Taro.showToast({ title: '视频通话已结束', icon: 'success' })
    } catch (error) {
      console.error('结束视频通话失败:', error)
    }
  }, [videoCallId, peerId])

  // Fetch a TRTC userSig for `userId` from the backend
  // (GET /api/trtc/userSig). Throws unless the response is 2xx and carries a
  // userSig payload.
  const fetchUserSig = async (userId: string) => {
    const app = Taro.getApp()
    // @ts-ignore
    const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
    const res = await Taro.request({ url: `${API_BASE}/api/trtc/userSig`, method: 'GET', data: { userId } })
    const status = res.statusCode as number
    const body = res.data as any
    if (status >= 200 && status < 300 && body?.userSig) {
      return body
    }
    throw new Error('获取 userSig 失败')
  }

  // Establish an audio-only call via the TRTC SDK (preferred path).
  // Throws when the SDK is missing, userSig fetch fails, or prerequisites
  // (current user id + voiceCallId) are absent — the caller then falls back
  // to raw/mock WebRTC.
  const initVoiceTRTC = async (isInitiator: boolean) => {
    try {
      const app = Taro.getApp()
      // @ts-ignore
      const currentUserId = app?.globalData?.currentUserId
      // NOTE(review): `voiceCallId` is React state; when invoked from
      // startVoiceCall the id has not been generated yet, so this throws and
      // the TRTC path is effectively skipped on the first call.
      if (!currentUserId || !voiceCallId) throw new Error('缺少用户或通话ID')

      // Load the TRTC SDK dynamically (separate weapp / h5 handling).
      let TRTC: any = null
      try {
        if (process.env.TARO_ENV === 'weapp') {
          // The weapp SDK package name depends on the integration;
          // 'trtc-miniapp' is used here as a placeholder.
          // eslint-disable-next-line @typescript-eslint/no-var-requires
          TRTC = require('trtc-miniapp')
        } else {
          // The web SDK is not bundled in this project; H5 falls straight
          // back to native/mock WebRTC.
          throw new Error('TRTC Web SDK 未集成，跳过')
        }
      } catch (e) {
        console.warn('未找到 TRTC SDK，请先安装 SDK（weapp: trtc-miniapp / web: trtc-js-sdk）', e)
        throw new Error('缺少 TRTC SDK')
      }

      const sig = await fetchUserSig(String(currentUserId))
      // NOTE(review): hard-coded sdkAppId — consider moving into config.
      const sdkAppId = 20027355
      // Derive a numeric room id from the call id (last 9 digits), falling
      // back to a time-based value if the id contains no digits.
      const roomId = Number(String(voiceCallId).replace(/[^\d]/g, '').slice(-9)) || Date.now() % 1000000000

      // Create the client and join the room; the client object is stashed in
      // the (repurposed) voicePeerConnectionRef.
      const client = TRTC.createClient({ mode: 'rtc', sdkAppId, userId: String(currentUserId), userSig: sig.userSig })
      ;(voicePeerConnectionRef as any).current = client // ref repurposed to hold the TRTC client
      await client.join({ roomId })

      // Create and publish the local microphone track. Both caller and
      // callee publish so the call is bidirectional.
      if (isInitiator) {
        const localAudioTrack = await TRTC.createMicrophoneAudioTrack()
        ;(voiceLocalStreamRef as any).current = localAudioTrack
        await client.publish([localAudioTrack])
      } else {
        const localAudioTrack = await TRTC.createMicrophoneAudioTrack()
        ;(voiceLocalStreamRef as any).current = localAudioTrack
        await client.publish([localAudioTrack])
      }

      // Subscribe to and play remote audio as peers publish it.
      client.on('user-published', async (remoteUser: any, mediaType: string) => {
        if (mediaType === 'audio') {
          await client.subscribe(remoteUser, 'audio')
          remoteUser?.audioTrack?.play()
        }
      })

      client.on('connection-state-changed', (state: any) => {
        console.log('TRTC 连接状态:', state)
      })

      console.log('✅ TRTC 语音通话初始化完成, roomId:', roomId)
    } catch (error) {
      console.error('❌ TRTC 语音通话初始化失败:', error)
      throw error
    }
  }

  // Fallback: set up a raw (or mock) WebRTC connection for an audio-only
  // call, used only when the TRTC path fails. isInitiator: true = caller.
  // Degrades to an inert mock connection when RTCPeerConnection is missing
  // (weapp / devtools); on a real device without support it throws so the
  // caller can report the failure.
  const initVoiceWebRTC = async (isInitiator: boolean) => {
    try {
      console.log('🔧 初始化语音通话WebRTC...')
      console.log('📱 当前环境:', process.env.TARO_ENV)
      console.log('🔧 是否在开发者工具:', isDevTool())
      console.log('🎤 WebRTC支持情况:', typeof RTCPeerConnection !== 'undefined')

      // Feature-detect the available WebRTC APIs.
      const webRTCSupport = checkWebRTCSupport()
      console.log('🎤 语音通话WebRTC支持情况:', webRTCSupport)
      
      if (!webRTCSupport.hasRTCPeerConnection) {
        console.warn('⚠️ RTCPeerConnection不支持，使用模拟模式')
        
        // Extra heads-up when running inside devtools.
        if (isDevTool()) {
          console.warn('⚠️ 开发者工具中WebRTC功能受限，使用模拟连接')
          Taro.showToast({
            title: '开发者工具中WebRTC功能受限，请在真机上测试完整功能',
            icon: 'none',
            duration: 3000
          })
        }
        
        // weapp: audio transport is expected to go through live-pusher;
        // install a mock connection so the rest of the flow keeps working.
        if (process.env.TARO_ENV === 'weapp') {
          console.log('📱 小程序环境，使用live-pusher进行语音通话')
          
          // Mock connection: canned offer/answer SDP, no-op methods.
          voicePeerConnectionRef.current = {
            addTrack: () => {},
            createOffer: () => Promise.resolve({ type: 'offer', sdp: 'mock-sdp' }),
            createAnswer: () => Promise.resolve({ type: 'answer', sdp: 'mock-sdp' }),
            setLocalDescription: () => Promise.resolve(),
            setRemoteDescription: () => Promise.resolve(),
            addIceCandidate: () => Promise.resolve(),
            close: () => {},
            onicecandidate: null,
            ontrack: null,
            onconnectionstatechange: null,
            connectionState: 'connected'
          } as any
          
          // Even in mock mode, send the offer if we are the caller.
          // NOTE(review): `voiceCallId` is this render's state and is null
          // when called straight from startVoiceCall, so this emit is skipped
          // on the first call (see startVoiceCall).
          if (isInitiator && socketRef.current && voiceCallId) {
            console.log('📤 模拟发送语音通话offer')
            socketRef.current.emit('voice:offer', {
              callId: voiceCallId,
              offer: { type: 'offer', sdp: 'mock-sdp' },
              to: peerId
            })
          }
          
          return
        }
        
        // Real device without RTCPeerConnection: nothing more we can do.
        if (!isDevTool()) {
          throw new Error('RTCPeerConnection不支持，无法建立语音通话连接')
        }
        
        // Devtools: verbose mock connection for local debugging.
        voicePeerConnectionRef.current = {
          addTrack: () => {
            console.log('🎤 模拟添加音频轨道')
          },
          createOffer: () => {
            console.log('🎤 模拟创建offer')
            return Promise.resolve({
              type: 'offer',
              sdp: 'mock-sdp'
            })
          },
          createAnswer: () => {
            console.log('🎤 模拟创建answer')
            return Promise.resolve({
              type: 'answer',
              sdp: 'mock-sdp'
            })
          },
          setLocalDescription: (desc) => {
            console.log('🎤 模拟设置本地描述:', desc)
            return Promise.resolve()
          },
          setRemoteDescription: (desc) => {
            console.log('🎤 模拟设置远程描述:', desc)
            return Promise.resolve()
          },
          addIceCandidate: (candidate) => {
            console.log('🎤 模拟添加ICE候选:', candidate)
            return Promise.resolve()
          },
          close: () => {
            console.log('🎤 模拟关闭连接')
          },
          onicecandidate: null,
          onconnectionstatechange: null,
          ontrack: null,
          connectionState: 'connected'
        } as any
        
        // Even in mock mode, send the offer if we are the caller
        // (same stale `voiceCallId` caveat as above).
        if (isInitiator && socketRef.current && voiceCallId) {
          console.log('📤 模拟发送语音通话offer')
          socketRef.current.emit('voice:offer', {
            callId: voiceCallId,
            offer: { type: 'offer', sdp: 'mock-sdp' },
            to: peerId
          })
        }
        
        return
      }

      // Real WebRTC path (only reached when the TRTC path was not taken).
      console.log('✅ 使用真实WebRTC连接')
      const config = {
        iceServers: [
          { urls: 'stun:stun.l.google.com:19302' },
          { urls: 'stun:stun1.l.google.com:19302' }
        ]
      }
      voicePeerConnectionRef.current = new RTCPeerConnection(config)

      // Attach the local audio tracks, if a real stream was acquired.
      if (voiceLocalStreamRef.current && voiceLocalStreamRef.current.getTracks) {
        console.log('🎤 添加本地音频流到WebRTC连接')
        voiceLocalStreamRef.current.getTracks().forEach(track => {
          voicePeerConnectionRef.current!.addTrack(track, voiceLocalStreamRef.current!)
          console.log('✅ 音频轨道已添加:', track.kind, track.label)
        })
      } else {
        console.warn('⚠️ 本地音频流为空或无效')
      }

      // Relay ICE candidates to the peer via the signalling socket.
      voicePeerConnectionRef.current.onicecandidate = (event) => {
        if (event.candidate && socketRef.current && voiceCallId) {
          console.log('🧊 发送语音通话ICE候选:', event.candidate)
          socketRef.current.emit('voice:ice-candidate', {
            callId: voiceCallId,
            candidate: event.candidate,
            to: peerId
          })
        }
      }

      // Toast on connect/disconnect transitions.
      voicePeerConnectionRef.current.onconnectionstatechange = () => {
        console.log('🔗 语音通话连接状态变化:', voicePeerConnectionRef.current?.connectionState)
        if (voicePeerConnectionRef.current?.connectionState === 'connected') {
          Taro.showToast({ title: '语音通话已连接', icon: 'success' })
        } else if (voicePeerConnectionRef.current?.connectionState === 'disconnected') {
          Taro.showToast({ title: '语音通话已断开', icon: 'none' })
        }
      }

      // Remote audio: in the mini-program, incoming audio plays
      // automatically — no manual sink needed.
      voicePeerConnectionRef.current.ontrack = (event) => {
        console.log('📡 收到远程音频流:', event)
      }

      // Caller side: create and send the audio-only SDP offer.
      if (isInitiator) {
        console.log('📞 创建语音通话offer...')
        const offer = await voicePeerConnectionRef.current.createOffer({ 
          offerToReceiveAudio: true, 
          offerToReceiveVideo: false 
        })
        await voicePeerConnectionRef.current.setLocalDescription(offer)
        
        if (socketRef.current && voiceCallId) {
          console.log('📤 发送语音通话offer:', offer)
          socketRef.current.emit('voice:offer', {
            callId: voiceCallId,
            offer,
            to: peerId
          })
        }
      }
      
      console.log('✅ 语音通话WebRTC初始化完成')
    } catch (error) {
      console.error('❌ 语音通话初始化失败:', error)
      
      // Environment-specific error messaging.
      if (isDevTool()) {
        Taro.showToast({ 
          title: '开发者工具中语音通话功能受限，请在真机上测试', 
          icon: 'none',
          duration: 3000
        })
      } else {
        Taro.showToast({ 
          title: '语音通话初始化失败，请检查网络连接', 
          icon: 'none' 
        })
      }
    }
  }

  /**
   * Handle an incoming voice-call offer (callee side): apply the remote
   * description, create an answer, set it locally, and send it back over the
   * socket. Previously used non-null assertions on the peer-connection ref,
   * which threw (and surfaced only as a generic toast) when the offer raced
   * ahead of connection init; now guarded explicitly.
   */
  const handleVoiceOffer = useCallback(async (offer: RTCSessionDescriptionInit) => {
    try {
      const pc = voicePeerConnectionRef.current
      if (!pc) {
        // Offer arrived before initVoiceWebRTC set up the connection.
        console.warn('⚠️ 收到语音offer但连接未初始化，忽略')
        return
      }
      await pc.setRemoteDescription(offer)
      const answer = await pc.createAnswer()
      await pc.setLocalDescription(answer)
      if (socketRef.current && voiceCallId) {
        socketRef.current.emit('voice:answer', {
          callId: voiceCallId,
          answer,
          to: peerId
        })
      }
    } catch (error) {
      console.error('处理语音offer失败:', error)
      Taro.showToast({ title: '处理语音通话失败', icon: 'none' })
    }
  }, [voiceCallId, peerId])

  /**
   * Accept an incoming voice call: record the call id, hide the incoming-call
   * UI, request microphone access, then bring up TRTC (falling back to
   * native/mock WebRTC when TRTC init throws) and activate the call screen.
   */
  const answerVoiceCall = async (callInfo: any) => {
    try {
      console.log('📞 接听语音通话:', callInfo)
      console.log('📱 当前环境:', process.env.TARO_ENV)
      console.log('🔧 是否在开发者工具:', isDevTool())

      setVoiceCallId(callInfo.callId)
      setIsVoiceCallIncoming(false)

      // Microphone permission is mandatory; bail out without it.
      const micGranted = await requestMicPermission()
      if (!micGranted) {
        Taro.showToast({ title: '需要麦克风权限', icon: 'none' })
        return
      }

      // Devtools cannot do real audio — warn the developer up front.
      if (isDevTool()) {
        Taro.showToast({
          title: '开发者工具中语音通话功能受限，建议在真机上测试',
          icon: 'none',
          duration: 3000
        })
      }

      // Prefer TRTC; on failure, grab an audio-only stream and fall back
      // to the native/mock WebRTC path.
      try {
        await initVoiceTRTC(false)
      } catch (e) {
        console.warn('TRTC 初始化失败，回退到原生/模拟 WebRTC:', e)
        voiceLocalStreamRef.current = await getAudioOnlyStream()
        await initVoiceWebRTC(false)
      }

      // Show the in-call UI.
      setIsVoiceCallActive(true)

      const connectingTitle = isDevTool()
        ? '开发者工具中语音通话功能受限'
        : '正在连接语音通话...'
      Taro.showToast({ title: connectingTitle, icon: 'none' })
    } catch (error) {
      console.error('❌ 接听语音通话失败:', error)

      // Error messaging differs between devtools and real devices.
      if (isDevTool()) {
        Taro.showToast({ 
          title: '开发者工具中语音通话功能受限，请在真机上测试', 
          icon: 'none',
          duration: 3000
        })
      } else {
        Taro.showToast({ 
          title: '无法访问麦克风，请检查权限设置', 
          icon: 'none' 
        })
      }
    }
  }

  // Decline an incoming voice call: notify the caller over the socket,
  // then clear the incoming-call UI state.
  const rejectVoiceCall = (callInfo: any) => {
    const s = socketRef.current
    if (s) {
      s.emit('voice:reject', { callId: callInfo.callId, to: callInfo.from })
    }
    setIsVoiceCallIncoming(false)
    setIncomingVoiceInfo(null)
  }

  // End the active voice call. Order matters: stop local audio tracks first,
  // then tear down the TRTC/WebRTC connection, then signal the peer over the
  // socket, and finally reset the call UI state.
  const endVoiceCall = useCallback(async () => {
    try {
      console.log('📞 结束语音通话...')
      console.log('📱 当前环境:', process.env.TARO_ENV)
      console.log('🔧 是否在开发者工具:', isDevTool())

      // Stop and release every local audio track.
      if (voiceLocalStreamRef.current) {
        try { 
          voiceLocalStreamRef.current.getTracks().forEach(t => {
            console.log('🛑 停止音频轨道:', t.kind, t.label)
            t.stop()
          })
        } catch (error) {
          console.warn('⚠️ 停止音频轨道失败:', error)
        }
        voiceLocalStreamRef.current = null
      }

      // Close whichever connection type is in the ref: a TRTC client exposes
      // leave()/unpublish(), a raw RTCPeerConnection exposes close().
      if (voicePeerConnectionRef.current) {
        try {
          // TRTC client case — duck-typed on the presence of leave().
          const client = voicePeerConnectionRef.current as any
          if (client && typeof client.leave === 'function') {
            try { await client.unpublish?.() } catch (_) {}
            try { await client.leave() } catch (_) {}
          } else if (typeof (voicePeerConnectionRef.current as any).close === 'function') {
            ;(voicePeerConnectionRef.current as any).close()
          }
        } catch (error) {
          console.warn('⚠️ 关闭连接失败:', error)
        }
        voicePeerConnectionRef.current = null
      }

      // Tell the peer the call has ended.
      if (socketRef.current && voiceCallId) {
        console.log('📤 发送语音通话结束信号')
        socketRef.current.emit('voice:end', { callId: voiceCallId, to: peerId })
      }

      // Reset call UI state.
      setIsVoiceCallActive(false)
      setVoiceCallId(null)

      // Toast text differs between devtools and real devices.
      if (isDevTool()) {
        Taro.showToast({ 
          title: '开发者工具中语音通话已结束', 
          icon: 'none',
          duration: 2000
        })
      } else {
        Taro.showToast({ 
          title: '语音通话已结束', 
          icon: 'success' 
        })
      }

      console.log('✅ 语音通话结束完成')
    } catch (error) {
      console.error('❌ 结束语音通话失败:', error)
      Taro.showToast({ 
        title: '结束通话时出现错误', 
        icon: 'none' 
      })
    }
  }, [voiceCallId, peerId])

  // Load the peer user's profile by id, store it, and reflect their display
  // name in the navigation-bar title. Failures are logged and ignored.
  const fetchPeerUserInfo = async (userId: string) => {
    try {
      const app = Taro.getApp()
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'

      const response = await Taro.request({
        url: `${API_BASE}/users/byIds`,
        method: 'GET',
        data: { ids: userId }
      })

      const status = response.statusCode as number
      if (status < 200 || status >= 300) return

      const list = response.data as any[]
      if (!list || list.length === 0) return

      const user = list[0]
      setPeerUser(user)

      // Walk a chain of candidate name fields; last resort is an id suffix.
      const displayName = user.nickname || user.username || user.name || user.realName || user.displayName || `用户${userId.slice(-4)}`
      Taro.setNavigationBarTitle({ title: displayName })

      console.log('获取对方用户信息成功:', user)
    } catch (error) {
      console.error('获取对方用户信息失败:', error)
    }
  }

  // Load the logged-in user's profile (id taken from app globalData) and
  // store it in state. No-op when no user id is available.
  const fetchCurrentUserInfo = async () => {
    try {
      const app = Taro.getApp()
      // @ts-ignore
      const currentUserId = app?.globalData?.currentUserId
      if (!currentUserId) return

      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'

      const response = await Taro.request({
        url: `${API_BASE}/users/byIds`,
        method: 'GET',
        data: { ids: currentUserId }
      })

      const status = response.statusCode as number
      if (status < 200 || status >= 300) return

      const list = response.data as any[]
      if (list && list.length > 0) {
        const user = list[0]
        setCurrentUser(user)
        console.log('获取当前用户信息成功:', user)
      }
    } catch (error) {
      console.error('获取当前用户信息失败:', error)
    }
  }

  // Parse route params on entry: adopt chatId/peerId, load both users'
  // profiles, set the nav title when entering from a card, and restore
  // recently-used emojis from local storage.
  useEffect(() => {
    const params = router?.params || {}
    if (params?.chatId) setChatId(params.chatId)
    if (params?.peerId) {
      setPeerId(params.peerId)
      // Resolve the peer's profile (also sets the nav-bar title).
      fetchPeerUserInfo(params.peerId)
    }
    fetchCurrentUserInfo()
    // Entered via a hospital/doctor card: use its name as the title.
    if (params?.targetName) {
      try { Taro.setNavigationBarTitle({ title: decodeURIComponent(params.targetName) }) } catch (_) { }
    }
    // Restore the recent-emoji list; ignore storage errors.
    try {
      const stored = getStorageSync('recentEmojis') as string[] | undefined;
      if (stored && Array.isArray(stored)) {
        setRecentEmojis(stored);
      }
    } catch (_) { }
  }, [router?.params]);

  // Auto-open/create the conversation from route params when there is no
  // chatId yet (e.g. entering from a hospital/doctor card that supplies
  // targetType/targetId). On success, adopts the returned chat id and infers
  // the peer user id for 1:1 chats.
  useEffect(() => {
    (async () => {
      const q = router?.params || {}
      console.log('聊天页面初始化参数:', q)

      if (chatId) {
        console.log('chatId已存在，跳过创建:', chatId)
        return
      }

      // Both targetType and targetId are required to open/create a chat.
      if (!q?.targetType || !q?.targetId) {
        console.log('缺少必要参数，无法创建会话:', { targetType: q?.targetType, targetId: q?.targetId })
        return
      }

      try {
        const app = Taro.getApp()
        // @ts-ignore
        const currentUserId = app?.globalData?.currentUserId
        // @ts-ignore
        const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'

        console.log('创建会话参数:', {
          currentUserId,
          API_BASE,
          targetType: q.targetType,
          targetId: q.targetId,
          targetName: q.targetName
        })

        if (!currentUserId) {
          console.error('用户未登录，无法创建会话')
          Taro.showToast({ title: '请先登录', icon: 'none' })
          return
        }

        const res = await Taro.request({
          url: `${API_BASE}/chats/openOrCreate`,
          method: 'POST',
          data: {
            fromUserId: currentUserId,
            targetType: q.targetType,
            targetId: q.targetId,
            targetName: q.targetName ? decodeURIComponent(q.targetName) : undefined
          }
        })

        console.log('创建会话响应:', res)

        if ((res.statusCode as number) >= 200 && (res.statusCode as number) < 300) {
          const c = res.data as any
          if (c && c._id) {
            setChatId(String(c._id))
            console.log('会话创建成功，chatId:', c._id)

            // Infer the peer id for point-to-point chats: the participant
            // that is not the current user.
            if (Array.isArray(c.participants)) {
              // @ts-ignore
              const me = String(currentUserId)
              const other = c.participants.map((x: any) => String(x)).find((id: string) => id !== me)
              if (other) {
                setPeerId(other)
                console.log('对端用户ID:', other)
                console.log('当前用户ID:', me)
                console.log('聊天参与者:', c.participants)
              }
            }
          } else {
            console.error('创建会话失败：响应数据格式错误', c)
            Taro.showToast({ title: '创建会话失败', icon: 'none' })
          }
        } else {
          console.error('创建会话失败：服务器返回错误状态码', res.statusCode)
          Taro.showToast({ title: '创建会话失败', icon: 'none' })
        }
      } catch (e) {
        console.error('打开/创建会话异常:', e)
        Taro.showToast({ title: '进入会话失败', icon: 'none' })
      }
    })()
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [router?.params, chatId])

  // Poll the server for this chat's messages and replace state when the tail
  // message changed. Re-entrancy is guarded by isAutoRefreshing. Invoked by
  // the 30s interval from startAutoRefresh and after socket (re)connects.
  const refreshMessages = useCallback(async () => {
    if (!chatId || isAutoRefreshing) {
      console.log('⚠️ 跳过自动刷新:', { chatId, isAutoRefreshing })
      return
    }

    try {
      setIsAutoRefreshing(true)
      console.log('🔄 开始自动刷新消息')
      
      const app = Taro.getApp()
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
      
      const res = await Taro.request({ 
        url: `${API_BASE}/chats/${chatId}/messages`, 
        method: 'GET',
        timeout: 5000
      })
      
      if ((res.statusCode as number) >= 200 && (res.statusCode as number) < 300) {
        const newMessages = (res.data as any[])
        console.log('🔄 自动刷新获取到消息数量:', newMessages.length)
        
        // Compare tail ids to detect whether anything changed.
        if (newMessages.length > 0) {
          const lastMessage = newMessages[newMessages.length - 1]
          const currentLastMessage = messages[messages.length - 1]
          
          // A differing last message means at least one new message arrived.
          if (!currentLastMessage || lastMessage._id !== currentLastMessage._id) {
            console.log('✅ 发现新消息，更新消息列表')
            setMessages(newMessages)
            setLastRefreshTime(Date.now())
            
            // Record index 0 as the pagination anchor (matches fetchMessages /
            // onRefresherRefresh). NOTE(review): the original comment said
            // "update the last message id" but this stores newMessages[0],
            // i.e. the first element — confirm which end is intended.
            setLastMessageId(newMessages[0]._id)
            
            // Fetch profiles (avatars) for every sender in the list.
            const ids = Array.from(new Set(newMessages.map((m) => String(m.senderId))))
            if (ids.length) {
              // Ids containing '_' denote virtual users (e.g. hospital_x).
              const realUserIds = ids.filter(id => !id.includes('_'))
              const virtualUserIds = ids.filter(id => id.includes('_'))

              const map: Record<string, any> = {}

              // Real users: resolve via the backend.
              if (realUserIds.length) {
                try {
                  const usersRes = await Taro.request({ 
                    url: `${API_BASE}/users/byIds`, 
                    method: 'GET', 
                    data: { ids: realUserIds.join(',') },
                    timeout: 5000
                  })
                  if ((usersRes.statusCode as number) >= 200 && (usersRes.statusCode as number) < 300) {
                    (usersRes.data as any[]).forEach((u: any) => { map[String(u._id)] = u })
                  }
                } catch (error) {
                  console.warn('⚠️ 获取用户信息失败:', error)
                }
              }

              // Virtual users: synthesize default name/avatar by id prefix.
              virtualUserIds.forEach(virtualId => {
                const [type] = virtualId.split('_')
                map[virtualId] = {
                  _id: virtualId,
                  username: type === 'hospital' ? '医院客服' : type === 'doctor' ? '医生' : '客服',
                  avatar: type === 'hospital' ? 'https://images.unsplash.com/photo-1586773860418-d37222d8fce3?w=80&h=80&fit=crop' : 'https://images.unsplash.com/photo-1559839734-2b71ea197ec2?w=80&h=80&fit=crop&crop=face'
                }
              })

              setUserMap(prev => ({ ...prev, ...map }))
            }
          } else {
            console.log('ℹ️ 没有新消息')
          }
        }
      } else {
        console.warn('⚠️ 自动刷新失败，状态码:', res.statusCode)
      }
    } catch (error) {
      console.error('❌ 自动刷新异常:', error)
    } finally {
      setIsAutoRefreshing(false)
    }
  }, [chatId, messages, isAutoRefreshing])

  // (Re)start the periodic message poller. Any previous interval is cleared
  // first so at most one 30-second timer is ever active.
  const startAutoRefresh = useCallback(() => {
    console.log('🔄 启动自动刷新机制')

    const existing = refreshIntervalRef.current
    if (existing) {
      clearInterval(existing)
    }

    // Poll for new messages every 30 seconds.
    refreshIntervalRef.current = setInterval(() => {
      console.log('⏰ 定时刷新触发')
      refreshMessages()
    }, 30000)
  }, [refreshMessages])

  // Stop the auto-refresh machinery: clear both the polling interval and
  // any pending one-shot timer, nulling the refs afterwards.
  const stopAutoRefresh = useCallback(() => {
    console.log('🛑 停止自动刷新机制')

    const interval = refreshIntervalRef.current
    if (interval) {
      clearInterval(interval)
      refreshIntervalRef.current = null
    }

    const timer = autoRefreshTimerRef.current
    if (timer) {
      clearTimeout(timer)
      autoRefreshTimerRef.current = null
    }
  }, [])

  // Pull-to-refresh handler: reload the full message list (replacing state),
  // reset the pagination anchor and has-more flag, and refresh sender
  // profiles. Guarded against re-entry by isRefreshing.
  const onRefresherRefresh = useCallback(async () => {
    if (!chatId || isRefreshing) {
      console.log('⚠️ 跳过下拉刷新:', { chatId, isRefreshing })
      return
    }

    try {
      setIsRefreshing(true)
      setRefresherTriggered(true)
      console.log('🔄 开始下拉刷新消息')
      
      const app = Taro.getApp()
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
      
      const res = await Taro.request({ 
        url: `${API_BASE}/chats/${chatId}/messages`, 
        method: 'GET',
        timeout: 10000
      })
      
      if ((res.statusCode as number) >= 200 && (res.statusCode as number) < 300) {
        const newMessages = (res.data as any[])
        console.log('🔄 下拉刷新获取到消息数量:', newMessages.length)
        
        // Replace the message list wholesale.
        setMessages(newMessages)
        setLastRefreshTime(Date.now())
        
        // First element's id is used as the pagination anchor.
        if (newMessages.length > 0) {
          setLastMessageId(newMessages[0]._id)
        }
        
        // A full page (>= 20) implies more history may exist.
        setHasMoreMessages(newMessages.length >= 20)
        
        // Fetch profiles (avatars) for every sender in the list.
        const ids = Array.from(new Set(newMessages.map((m) => String(m.senderId))))
        if (ids.length) {
          // Ids containing '_' denote virtual users (e.g. hospital_x).
          const realUserIds = ids.filter(id => !id.includes('_'))
          const virtualUserIds = ids.filter(id => id.includes('_'))

          const map: Record<string, any> = {}

          // Real users: resolve via the backend.
          if (realUserIds.length) {
            try {
              const usersRes = await Taro.request({ 
                url: `${API_BASE}/users/byIds`, 
                method: 'GET', 
                data: { ids: realUserIds.join(',') },
                timeout: 10000
              })
              if ((usersRes.statusCode as number) >= 200 && (usersRes.statusCode as number) < 300) {
                (usersRes.data as any[]).forEach((u: any) => { map[String(u._id)] = u })
              }
            } catch (error) {
              console.warn('⚠️ 获取用户信息失败:', error)
            }
          }

          // Virtual users: synthesize default name/avatar by id prefix.
          virtualUserIds.forEach(virtualId => {
            const [type] = virtualId.split('_')
            map[virtualId] = {
              _id: virtualId,
              username: type === 'hospital' ? '医院客服' : type === 'doctor' ? '医生' : '客服',
              avatar: type === 'hospital' ? 'https://images.unsplash.com/photo-1586773860418-d37222d8fce3?w=80&h=80&fit=crop' : 'https://images.unsplash.com/photo-1559839734-2b71ea197ec2?w=80&h=80&fit=crop&crop=face'
            }
          })

          setUserMap(prev => ({ ...prev, ...map }))
        }
        
        console.log('✅ 下拉刷新完成')
       
      } else {
        console.error('❌ 下拉刷新失败，状态码:', res.statusCode)
        Taro.showToast({ title: '刷新失败', icon: 'none' })
      }
    } catch (error) {
      console.error('❌ 下拉刷新异常:', error)
      Taro.showToast({ title: '刷新失败，请检查网络', icon: 'none' })
    } finally {
      setIsRefreshing(false)
      setRefresherTriggered(false)
    }
  }, [chatId, isRefreshing])

  // Pull-to-refresh lifecycle callback (logging only).
  const onRefresherPulling = useCallback(() => { console.log('🔄 下拉刷新触发中...') }, [])

  // Pull-to-refresh restore callback (logging only).
  const onRefresherRestore = useCallback(() => { console.log('🔄 下拉刷新恢复中...') }, [])

  // Initial message load for the current chat: fetch the list, seed the
  // pagination anchor and has-more flag, and resolve sender profiles.
  // Pass showLoading=true to display a blocking loading indicator.
  const fetchMessages = useCallback(async (showLoading = false) => {
    try {
      if (!chatId) {
        console.log('⚠️ chatId不存在，跳过拉取消息')
        return
      }
      
      if (showLoading) {
        Taro.showLoading({ title: '加载消息中...' })
      }
      
      console.log('📥 开始拉取消息，chatId:', chatId)
      
      const app = Taro.getApp()
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
      const res = await Taro.request({ 
        url: `${API_BASE}/chats/${chatId}/messages`, 
        method: 'GET',
        timeout: 10000
      })
      
      if ((res.statusCode as number) >= 200 && (res.statusCode as number) < 300) {
        const list = (res.data as any[])
        console.log('📥 拉取到消息数量:', list.length)
        setMessages(list)
        
        // First element's id is the pagination anchor for loading history.
        if (list.length > 0) {
          setLastMessageId(list[0]._id)
        }
        
        // A full page implies more history may exist (page size assumed 20).
        setHasMoreMessages(list.length >= 20) // assumes 20 messages per page
        
        // Resolve profiles (avatars) for every sender in the list.
        const ids = Array.from(new Set(list.map((m) => String(m.senderId))))
        if (ids.length) {
          // Ids containing '_' denote virtual users (e.g. hospital_x).
          const realUserIds = ids.filter(id => !id.includes('_'))
          const virtualUserIds = ids.filter(id => id.includes('_'))

          const map: Record<string, any> = {}

          // Real users: resolve via the backend.
          if (realUserIds.length) {
            const usersRes = await Taro.request({ 
              url: `${API_BASE}/users/byIds`, 
              method: 'GET', 
              data: { ids: realUserIds.join(',') },
              timeout: 10000
            })
            if ((usersRes.statusCode as number) >= 200 && (usersRes.statusCode as number) < 300) {
              (usersRes.data as any[]).forEach((u: any) => { map[String(u._id)] = u })
            }
          }

          // Virtual users: synthesize default name/avatar by id prefix.
          virtualUserIds.forEach(virtualId => {
            const [type] = virtualId.split('_')
            map[virtualId] = {
              _id: virtualId,
              username: type === 'hospital' ? '医院客服' : type === 'doctor' ? '医生' : '客服',
              avatar: type === 'hospital' ? 'https://images.unsplash.com/photo-1586773860418-d37222d8fce3?w=80&h=80&fit=crop' : 'https://images.unsplash.com/photo-1559839734-2b71ea197ec2?w=80&h=80&fit=crop&crop=face'
            }
          })

          // NOTE(review): replaces userMap wholesale here, while
          // refreshMessages/onRefresherRefresh merge into it — confirm the
          // reset is intentional for the initial load.
          setUserMap(map)
        }
      } else {
        console.error('❌ 拉取消息失败，状态码:', res.statusCode)
        Taro.showToast({ title: '加载消息失败', icon: 'none' })
      }
    } catch (error) {
      console.error('❌ 拉取消息异常:', error)
      Taro.showToast({ title: '加载消息失败，请检查网络', icon: 'none' })
    } finally {
      if (showLoading) {
        Taro.hideLoading()
      }
    }
  }, [chatId])

  // Establish the Socket.IO connection and join the chat room. Runs a
  // reachability probe first, then delegates the actual socket setup to
  // establishSocketConnection below.
  // NOTE(review): this effect returns no cleanup — the socket is only torn
  // down by the "clean up old connection" branch on the next run; confirm
  // that leaving the socket alive across unmount is intended.
  useEffect(() => {
    const app = Taro.getApp()
    // @ts-ignore
    const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
    // Socket support is only available on weapp/h5.
    const url = API_BASE.replace(/\/$/, '')
    // @ts-ignore
    const currentUserId = app?.globalData?.currentUserId
    
    console.log('🔗 Socket连接配置:', {
      url,
      currentUserId,
      environment: process.env.TARO_ENV,
      hasExistingSocket: !!socketRef.current
    })
    
    // Probe backend reachability before attempting the socket handshake.
    const checkNetworkConnectivity = async () => {
      try {
        console.log('🌐 检查网络连通性...')
        const testUrl = `${url}/api/health` // assumes the backend exposes a health-check endpoint — TODO confirm
        const response = await Taro.request({
          url: testUrl,
          method: 'GET',
          timeout: 5000
        })
        console.log('✅ 网络连通性检查成功:', response.statusCode)
        return true
      } catch (error) {
        console.error('❌ 网络连通性检查失败:', error)
        // Fall back to a hard-coded localhost URL before giving up.
        try {
          const fallbackUrl = 'http://localhost:3000/api/health'
          const response = await Taro.request({
            url: fallbackUrl,
            method: 'GET',
            timeout: 5000
          })
          console.log('✅ 备用URL连通性检查成功:', response.statusCode)
          return true
        } catch (fallbackError) {
          console.error('❌ 备用URL也连接失败:', fallbackError)
          Taro.showModal({
            title: '网络连接问题',
            content: `无法连接到服务器 ${url}。请检查：\n1. 后端服务器是否正在运行\n2. 网络连接是否正常\n3. IP地址是否正确`,
            showCancel: false
          })
          return false
        }
      }
    }
    
    // Reuse a live connection instead of recreating it.
    if (socketRef.current && socketRef.current.connected) {
      console.log('🔗 Socket已连接，跳过重新创建')
      return
    }
    
    // Clean up a stale (disconnected) socket before reconnecting.
    if (socketRef.current) {
      console.log('🔗 清理旧Socket连接')
      socketRef.current.disconnect()
      socketRef.current = null
    }
    
    if (!currentUserId) {
      console.warn('⚠️ 用户ID不存在，无法建立Socket连接')
      return
    }
    
    // Connect only after the connectivity probe succeeds.
    checkNetworkConnectivity().then(isConnected => {
      if (!isConnected) {
        console.error('❌ 网络连通性检查失败，跳过Socket连接')
        return
      }
      
      // Probe passed — proceed with the Socket connection.
      establishSocketConnection(url, currentUserId)
    }).catch(error => {
      console.error('❌ 网络连通性检查异常:', error)
      Taro.showToast({ title: '网络检查失败', icon: 'none' })
    })
  }, [chatId])

  // Socket connection logic extracted into its own function: creates the
  // client (weapp adapter first, standard client as fallback), joins the
  // chat room, and wires up every chat / video-call / voice-call event
  // handler. Returns a cleanup function that leaves the room and detaches
  // all handlers.
  // NOTE(review): the caller (the effect above) ignores the returned cleanup,
  // so handlers are never detached this way — confirm whether duplicated
  // listeners can accumulate across reconnect cycles.
  // NOTE(review): the dependency array omits chatId even though it is read
  // throughout — the captured chatId may be stale; confirm.
  const establishSocketConnection = useCallback((url: string, currentUserId: string) => {
    try {
      console.log('🔗 开始建立Socket连接:', { url, currentUserId })
      
      // In the WeChat mini-program environment, prefer the
      // socket.io-miniprogram adapter.
      if (process.env.TARO_ENV === 'weapp') {
        try {
          // Required lazily to avoid breaking the H5 bundle.
          // eslint-disable-next-line @typescript-eslint/no-var-requires
          const mini = require('weapp.socket.io')
          const miniIO = mini?.io || mini?.default?.io || mini
          if (miniIO) {
            console.log('🔗 使用小程序Socket适配器')
            socketRef.current = miniIO(url, {
              transports: ['websocket'],
              query: { userId: currentUserId },
              reconnection: true,
              reconnectionAttempts: 5, // cap reconnect attempts
              reconnectionDelay: 2000,
              reconnectionDelayMax: 10000,
              timeout: 15000,
              forceNew: true
            })
          }
        } catch (e) {
          console.warn('⚠️ 小程序Socket适配器加载失败，使用标准客户端:', e)
          // Fall back to the standard client (websocket transport only).
          socketRef.current = createIO(url, {
            transports: ['websocket'],
            query: { userId: currentUserId },
            reconnection: true,
            reconnectionAttempts: 5, // cap reconnect attempts
            reconnectionDelay: 2000,
            reconnectionDelayMax: 10000,
            timeout: 15000,
            forceNew: true
          })
        }
      }
      // Other platforms (or failed adapter init) use the standard client.
      if (!socketRef.current) {
        console.log('🔗 使用标准Socket客户端')
        socketRef.current = createIO(url, {
          transports: ['websocket'],
          query: { userId: currentUserId },
          reconnection: true,
          reconnectionAttempts: 5, // cap reconnect attempts
          reconnectionDelay: 2000,
          reconnectionDelayMax: 10000,
          timeout: 15000,
          forceNew: true
        })
      }
      
      console.log('🔗 Socket客户端创建成功')
    } catch (error) {
      console.error('❌ Socket客户端创建失败:', error)
      socketRef.current = null
      Taro.showToast({ 
        title: 'Socket连接创建失败', 
        icon: 'none',
        duration: 3000
      })
      return
    }
    const s = socketRef.current
    if (s && chatId) {
      console.log('🔗 加入聊天房间:', chatId)
      s.emit('chat:join', chatId)
      
      // Re-join the room automatically after a (re)connect, then refetch
      // the latest messages shortly afterwards.
      const onConnect = () => { 
        console.log('✅ Socket连接成功，重新加入房间:', chatId)
        try { 
          s.emit('chat:join', chatId)
          // After connecting, pull the latest messages once.
          setTimeout(() => {
            console.log('🔄 连接成功后重新拉取消息')
            refreshMessages()
          }, 1000)
        } catch (error) {
          console.error('❌ 重新加入房间失败:', error)
        } 
      }
      
      const onReconnect = () => { 
        console.log('🔄 Socket重连成功，重新加入房间:', chatId)
        try { 
          s.emit('chat:join', chatId)
          // After reconnecting, pull the latest messages once.
          setTimeout(() => {
            console.log('🔄 重连成功后重新拉取消息')
            refreshMessages()
          }, 1000)
        } catch (error) {
          console.error('❌ 重连后加入房间失败:', error)
        } 
      }
      
      const onConnectError = (err: any) => { 
        console.error('❌ Socket连接错误:', err)
       
      }
      
      const onError = (err: any) => { 
        console.error('❌ Socket错误:', err)
        Taro.showToast({ title: 'Socket连接错误', icon: 'none', duration: 2000 })
      }
      
      const onDisconnect = () => {
        console.log('🔌 Socket断开连接')
      }
      
      // New chat message: only append when it belongs to this chat.
      const onNew = (payload: any) => {
        console.log('📨 收到新消息事件:', payload)
        if (payload && String(payload.chatId) === String(chatId)) {
          console.log('✅ 消息属于当前聊天，添加到列表')
          pushUniqueMessage(payload.message)
        } else {
          console.log('⚠️ 消息不属于当前聊天，跳过:', {
            payloadChatId: payload?.chatId,
            currentChatId: chatId
          })
        }
      }
      const onTyping = (_payload: any) => {
        // Typing-indicator handling has been removed.
      }
      // Mark every message as read by the given user (idempotent).
      const onReadAll = (payload: any) => {
        if (payload && String(payload.chatId) === String(chatId)) {
          const uid = String(payload.userId)
          setMessages((prev) => prev.map((m) => {
            const has = Array.isArray(m.readBy) && m.readBy.some((x: any) => String(x) === uid)
            return has ? m : { ...m, readBy: [...(m.readBy || []), uid] }
          }))
        }
      }
      const onOnline = (_p: any) => { /* online-presence handling removed */ }
      const onOffline = (_p: any) => { /* offline-presence handling removed */ }

      // --- Video-call signalling handlers ---
      const onVideoIncoming = (payload: any) => {
        console.log('收到视频通话邀请:', payload)
        if (payload && payload.from && payload.callId) {
          setIncomingCallInfo(payload)
          setIsVideoCallIncoming(true)
          // A custom modal is used instead of Taro.showModal for better UI.
        }
      }

      const onVideoOffer = async (payload: any) => {
        console.log('收到视频通话offer:', payload)
        if (payload && payload.callId === videoCallId && payload.offer) {
          await handleVideoOffer(payload.offer)
        }
      }

      const onVideoAnswer = async (payload: any) => {
        console.log('收到视频通话answer:', payload)
        if (payload && payload.callId === videoCallId && payload.answer) {
          await peerConnectionRef.current!.setRemoteDescription(payload.answer)
        }
      }

      const onVideoIceCandidate = async (payload: any) => {
        console.log('收到ICE候选:', payload)
        if (payload && payload.callId === videoCallId && payload.candidate) {
          await peerConnectionRef.current!.addIceCandidate(payload.candidate)
        }
      }

      const onVideoEnd = (payload: any) => {
        console.log('收到视频通话结束信号:', payload)
        if (payload && payload.callId === videoCallId) {
          endVideoCall()
          Taro.showToast({ title: '对方已结束视频通话', icon: 'none' })
        }
      }

      const onVideoReject = (payload: any) => {
        console.log('视频通话被拒绝:', payload)
        if (payload && payload.callId === videoCallId) {
          setIsVideoCallActive(false)
          setVideoCallId(null)
          Taro.showToast({ title: '对方拒绝了视频通话', icon: 'none' })
        }
      }
      // --- Voice-call signalling handlers ---
      const onVoiceIncoming = (payload: any) => {
        console.log('收到语音通话邀请:', payload)
        if (payload && payload.from && payload.callId) {
          setIncomingVoiceInfo(payload)
          setIsVoiceCallIncoming(true)
        }
      }
      const onVoiceOffer = async (payload: any) => {
        console.log('收到语音通话offer:', payload)
        if (payload && payload.callId === voiceCallId && payload.offer) {
          await handleVoiceOffer(payload.offer)
        }
      }
      const onVoiceAnswer = async (payload: any) => {
        console.log('收到语音通话answer:', payload)
        if (payload && payload.callId === voiceCallId && payload.answer) {
          await voicePeerConnectionRef.current!.setRemoteDescription(payload.answer)
        }
      }
      const onVoiceIce = async (payload: any) => {
        console.log('收到语音 ICE 候选:', payload)
        if (payload && payload.callId === voiceCallId && payload.candidate) {
          await voicePeerConnectionRef.current!.addIceCandidate(payload.candidate)
        }
      }
      const onVoiceEnd = (payload: any) => {
        console.log('收到语音通话结束信号:', payload)
        if (payload && payload.callId === voiceCallId) {
          endVoiceCall()
          Taro.showToast({ title: '对方已结束语音通话', icon: 'none' })
        }
      }
      const onVoiceReject = (payload: any) => {
        console.log('语音通话被拒绝:', payload)
        if (payload && payload.callId === voiceCallId) {
          setIsVoiceCallActive(false)
          setVoiceCallId(null)
          Taro.showToast({ title: '对方拒绝了语音通话', icon: 'none' })
        }
      }
      s.on('connect', onConnect)
      // @ts-ignore: socket.io v4 provides the reconnecting/reconnect events
      s.on('reconnect', onReconnect)
      s.on('connect_error', onConnectError)
      s.on('error', onError)
      s.on('disconnect', onDisconnect)
      s.on('chat:newMessage', onNew)
      s.on('chat:typing', onTyping)
      s.on('chat:readAll', onReadAll)
      s.on('presence:online', onOnline)
      s.on('presence:offline', onOffline)

      // Video-call events
      s.on('video:incoming', onVideoIncoming)
      s.on('video:offer', onVideoOffer)
      s.on('video:answer', onVideoAnswer)
      s.on('video:ice-candidate', onVideoIceCandidate)
      s.on('video:end', onVideoEnd)
      s.on('video:reject', onVideoReject)
      s.on('voice:incoming', onVoiceIncoming)
      s.on('voice:offer', onVoiceOffer)
      s.on('voice:answer', onVoiceAnswer)
      s.on('voice:ice-candidate', onVoiceIce)
      s.on('voice:end', onVoiceEnd)
      s.on('voice:reject', onVoiceReject)
      // Cleanup: leave the room and detach every handler registered above.
      return () => {
        try { s.emit('chat:leave', chatId) } catch (_) { }
        s.off('connect', onConnect)
        // @ts-ignore
        s.off('reconnect', onReconnect)
        s.off('connect_error', onConnectError)
        s.off('error', onError)
        s.off('disconnect', onDisconnect)
        s.off('chat:newMessage', onNew)
        s.off('chat:typing', onTyping)
        s.off('chat:readAll', onReadAll)
        s.off('presence:online', onOnline)
        s.off('presence:offline', onOffline)
        s.off('video:incoming', onVideoIncoming)
        s.off('video:offer', onVideoOffer)
        s.off('video:answer', onVideoAnswer)
        s.off('video:ice-candidate', onVideoIceCandidate)
        s.off('video:end', onVideoEnd)
        s.off('video:reject', onVideoReject)
        s.off('voice:incoming', onVoiceIncoming)
        s.off('voice:offer', onVoiceOffer)
        s.off('voice:answer', onVoiceAnswer)
        s.off('voice:ice-candidate', onVoiceIce)
        s.off('voice:end', onVoiceEnd)
        s.off('voice:reject', onVoiceReject)
      }
    }
  }, [peerId, pushUniqueMessage, handleVideoOffer, handleVoiceOffer, endVideoCall, endVoiceCall, videoCallId, voiceCallId, refreshMessages])

  // 页面隐藏/显示时停止/恢复自动刷新（H5 端通过 visibilitychange 监听）
  useEffect(() => {
    const handlePageHide = () => {
      console.log('📱 页面隐藏，停止自动刷新')
      stopAutoRefresh()
    }

    const handlePageShow = () => {
      console.log('📱 页面显示，启动自动刷新')
      if (chatId) {
        startAutoRefresh()
        refreshMessages()
      }
    }

    // 必须使用同一个函数引用注册和注销。
    // 原实现注册匿名函数、却用 handlePageHide 注销，监听器永远移除不掉（泄漏且重复触发）。
    const onVisibilityChange = () => {
      if (document.hidden) {
        handlePageHide()
      } else {
        handlePageShow()
      }
    }

    // 仅 H5 端存在 document；小程序端由 useDidShow 等生命周期处理
    if (process.env.TARO_ENV === 'h5') {
      document.addEventListener('visibilitychange', onVisibilityChange)
    }

    return () => {
      if (process.env.TARO_ENV === 'h5') {
        document.removeEventListener('visibilitychange', onVisibilityChange)
      }
    }
  }, [chatId, startAutoRefresh, stopAutoRefresh, refreshMessages])

  // 组件卸载（及依赖变化）时的统一清理：停止自动刷新、清计时器、停录音、销毁音频、挂断音视频通话
  useEffect(() => {
    return () => {
      // 停止自动刷新
      stopAutoRefresh()
      
      // 清理录音计时器
      if (recordingTimerRef.current) {
        clearInterval(recordingTimerRef.current);
        recordingTimerRef.current = null
      }
      // 清理播放进度计时器
      if (playProgressTimerRef.current) {
        clearInterval(playProgressTimerRef.current);
        playProgressTimerRef.current = null
      }
      // 停止录音（stop 在异常状态下可能抛错，故 try 包裹、尽力而为）
      try {
        if (recorderManagerRef.current && isRecording) {
          recorderManagerRef.current.stop();
        }
      } catch (_) { }
      // 停止并销毁所有音频实例
      try {
        voicePlayerManager.destroyAll()
      } catch (_) { }
      // 清理视频通话
      try {
        if (isVideoCallActive) {
          endVideoCall()
        }
      } catch (_) { }
      // 清理语音通话
      try {
        if (isVoiceCallActive) {
          endVoiceCall()
        }
      } catch (_) { }
    };
    // NOTE(review): 依赖数组里的 playingVoiceId 在 effect 内并未被引用，
    // 疑似多余依赖（会导致每次播放状态变化时额外执行一轮清理）——待确认后可移除
  }, [playingVoiceId, isRecording, isVideoCallActive, isVoiceCallActive, endVideoCall, endVoiceCall, stopAutoRefresh]);


  // 加载更多历史消息：由消息列表滚动到顶部（onScrollToUpper）触发，
  // 以 lastMessageId 为游标向服务端请求更早的一页（before + limit 分页）
  const loadMoreMessages = useCallback(async () => {
    // 无会话 / 没有更多 / 正在加载 / 还没有游标 —— 任一条件不满足都直接返回
    if (!chatId || !hasMoreMessages || isLoadingMore || !lastMessageId) {
      console.log('⚠️ 无法加载更多消息:', { chatId, hasMoreMessages, isLoadingMore, lastMessageId })
      return
    }

    try {
      setIsLoadingMore(true)
      console.log('📥 开始加载更多消息，lastMessageId:', lastMessageId)
      
      const app = Taro.getApp()
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
      
      const res = await Taro.request({ 
        url: `${API_BASE}/chats/${chatId}/messages`, 
        method: 'GET',
        data: { 
          before: lastMessageId,
          limit: 20
        },
        timeout: 10000
      })
      
      if ((res.statusCode as number) >= 200 && (res.statusCode as number) < 300) {
        const newMessages = (res.data as any[])
        console.log('📥 加载到更多消息数量:', newMessages.length)
        
        if (newMessages.length > 0) {
          // 历史消息插到现有列表前面
          // NOTE(review): 这里假定服务端按时间升序返回、且 newMessages[0] 是最旧的一条
          // （作为下一页的 before 游标）——需对照服务端实现确认
          setMessages(prev => [...newMessages, ...prev])
          
          // 更新游标为本页最旧的一条消息 id
          setLastMessageId(newMessages[0]._id)
          
          // 返回数量不足一页（20 条）即认为没有更多历史消息
          setHasMoreMessages(newMessages.length >= 20)
          
          // 补全这页消息涉及到的发送者信息
          const ids = Array.from(new Set(newMessages.map((m) => String(m.senderId))))
          if (ids.length) {
            // 约定：带下划线的 senderId 是虚拟用户（如 hospital_xxx），其余是真实用户
            const realUserIds = ids.filter(id => !id.includes('_'))
            const virtualUserIds = ids.filter(id => id.includes('_'))

            // 获取真实用户信息
            if (realUserIds.length) {
              const usersRes = await Taro.request({ 
                url: `${API_BASE}/users/byIds`, 
                method: 'GET', 
                data: { ids: realUserIds.join(',') },
                timeout: 10000
              })
              if ((usersRes.statusCode as number) >= 200 && (usersRes.statusCode as number) < 300) {
                setUserMap(prev => {
                  const newMap = { ...prev }
                  ;(usersRes.data as any[]).forEach((u: any) => { 
                    newMap[String(u._id)] = u 
                  })
                  return newMap
                })
              }
            }

            // 为虚拟用户创建默认信息（名称/头像按 id 前缀推断）
            setUserMap(prev => {
              const newMap = { ...prev }
              virtualUserIds.forEach(virtualId => {
                if (!newMap[virtualId]) {
                  const [type] = virtualId.split('_')
                  newMap[virtualId] = {
                    _id: virtualId,
                    username: type === 'hospital' ? '医院客服' : type === 'doctor' ? '医生' : '客服',
                    avatar: type === 'hospital' ? 'https://images.unsplash.com/photo-1586773860418-d37222d8fce3?w=80&h=80&fit=crop' : 'https://images.unsplash.com/photo-1559839734-2b71ea197ec2?w=80&h=80&fit=crop&crop=face'
                  }
                }
              })
              return newMap
            })
          }
        } else {
          // 没有更多消息了
          setHasMoreMessages(false)
        }
      } else {
        console.error('❌ 加载更多消息失败，状态码:', res.statusCode)
        Taro.showToast({ title: '加载更多消息失败', icon: 'none' })
      }
    } catch (error) {
      console.error('❌ 加载更多消息异常:', error)
      Taro.showToast({ title: '加载更多消息失败，请检查网络', icon: 'none' })
    } finally {
      setIsLoadingMore(false)
    }
  }, [chatId, hasMoreMessages, isLoadingMore, lastMessageId])


  // 初次挂载 / fetchMessages 变化时拉取消息
  // （true 参数的语义由 fetchMessages 定义，推测为“初始全量加载”——待确认）
  useEffect(() => {
    fetchMessages(true)
  }, [fetchMessages])



  // 进入会话后，把该会话内的消息标记为当前用户已读（尽力而为，失败静默）
  useEffect(() => {
    const markAllRead = async () => {
      if (!chatId) return
      const app = Taro.getApp()
      // @ts-ignore
      const currentUserId = app?.globalData?.currentUserId
      if (!currentUserId) return
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
      await Taro.request({ url: `${API_BASE}/chats/${chatId}/readAll`, method: 'POST', data: { userId: currentUserId } })
    }
    // 已读上报失败不影响页面，静默忽略
    markAllRead().catch(() => { })
  }, [chatId])

  // 切换表情面板；阻止冒泡，避免立刻触发“点击外部关闭面板”的全局监听
  const toggleEmojiPanel = (e) => {
    e.stopPropagation();
    // 函数式更新：避免闭包里捕获到过期的 showEmojiPanel 值
    setShowEmojiPanel((prev) => !prev);
  };

  // 切换“+”号额外功能面板的显隐
  const toggleExtraFunction = () => {
    // 函数式更新：避免闭包里捕获到过期的 showExtraFunction 值
    setShowExtraFunction((prev) => !prev);
  };

  // 点击页面其他区域时关闭表情面板。
  // 仅 H5 端注册：小程序环境没有 document，直接访问会抛错
  // （与本文件其余 document 操作统一按 TARO_ENV 守卫）。
  useEffect(() => {
    if (process.env.TARO_ENV !== 'h5') return;
    const handleClickOutside = () => setShowEmojiPanel(false);
    document.addEventListener('click', handleClickOutside);
    return () => document.removeEventListener('click', handleClickOutside);
  }, []);

  // 表情面板定位：面板显示时把它摆到输入框右侧、垂直居中
  //（仅 H5 端直接操作 DOM；小程序端需改用 Taro.createSelectorQuery 获取节点位置）
  useEffect(() => {
    if (!showEmojiPanel || !panelRef.current || !inputRef.current) return;

    // H5端：直接操作DOM
    if (process.env.TARO_ENV === 'h5') {
      const inputRect = inputRef.current.getBoundingClientRect();
      const panelRect = panelRef.current.getBoundingClientRect();
      // 面板左缘放在输入框右侧 10px 处，纵向对输入框居中
      panelRef.current.style.left = `${inputRect.right + 10}px`;
      panelRef.current.style.top = `${inputRect.top + inputRect.height / 2 - panelRect.height / 2}px`;
    }

    // 小程序端：需使用 Taro.createSelectorQuery() 获取节点位置（示例略）
  }, [showEmojiPanel]);

  // 在光标处插入表情，维护“最近使用”列表，并关闭表情面板
  const insertEmoji = (emoji) => {
    const input = inputRef.current;
    if (!input) return;

    if (process.env.TARO_ENV === 'h5') {
      // H5 端：按当前选区拆分文本，把表情插到光标位置
      const caretStart = input.selectionStart;
      const caretEnd = input.selectionEnd;
      const nextValue = `${inputValue.substring(0, caretStart)}${emoji}${inputValue.substring(caretEnd)}`;
      setInputValue(nextValue);
      // 下一轮事件循环恢复焦点，并把光标移到表情之后
      setTimeout(() => {
        input.focus();
        const caretAfter = caretStart + emoji.length;
        input.setSelectionRange(caretAfter, caretAfter);
      }, 0);
    } else {
      // 小程序端：直接拼接（暂不支持精确光标位置）
      setInputValue(inputValue + emoji);
    }

    // 最近使用：去重置顶，最多保留 24 个，并同步到本地存储（存储失败忽略）
    setRecentEmojis((prev) => {
      const updated = [emoji, ...prev.filter((item) => item !== emoji)].slice(0, 24);
      try { setStorageSync('recentEmojis', updated); } catch (_) { }
      return updated;
    });

    setShowEmojiPanel(false);
  };

  // 初始化录音管理器（懒加载单例）：首次调用时创建并绑定 onStart/onStop/onError
  const initRecorderManager = () => {
    if (!recorderManagerRef.current) {
      recorderManagerRef.current = Taro.getRecorderManager();

      // 录音开始事件：置录音态、记录起始时间、启动每秒自增计时器
      recorderManagerRef.current.onStart(() => {
        setIsRecording(true);
        setRecordingDuration(0);
        recordingStartTimeRef.current = Date.now();

        // 开始计时（以自增为准，避免某些环境下 Date.now 计算导致不更新）
        recordingTimerRef.current = setInterval(() => {
          setRecordingDuration((prev) => {
            const next = prev + 1;
            // 达到 60 秒上限自动停止录音
            if (next >= 60) {
              stopRecording();
            }
            return next;
          });
        }, 1000);

        Taro.showToast({
          title: '开始录音',
          icon: 'none'
        });
      });

      // 录音结束事件：清计时器、计算时长，>=1 秒才发送，否则提示太短
      recorderManagerRef.current.onStop((res) => {
        setIsRecording(false);
        // 清除计时器
        if (recordingTimerRef.current) {
          clearInterval(recordingTimerRef.current);
          recordingTimerRef.current = null;
        }
        // 以返回的 duration 或开始时间为准计算秒数
        const ms = (res && typeof res.duration === 'number' && res.duration > 0)
          ? res.duration
          : Math.max(0, Date.now() - (recordingStartTimeRef.current || Date.now()))
        const seconds = Math.round(ms / 1000)
        if (seconds >= 1) {
          sendVoiceMessage(res.tempFilePath, seconds);
        } else {
          Taro.showToast({ title: '录音时间太短', icon: 'none' });
        }
        setRecordingDuration(0);
      });

      // 录音错误事件：复位全部录音状态并提示用户
      recorderManagerRef.current.onError((err) => {
        console.error('录音失败', err);
        setIsRecording(false);
        setRecordingDuration(0);
        if (recordingTimerRef.current) {
          clearInterval(recordingTimerRef.current);
          recordingTimerRef.current = null;
        }
        Taro.showToast({
          title: '录音失败',
          icon: 'none'
        });
      });
    }
  };

  // 开始录音（按住说话按下时触发）：仅小程序端，需已获录音权限
  const startRecording = () => {
    if (isRecording) return;
    // 仅小程序端
    if (process.env.TARO_ENV !== 'weapp') {
      Taro.showToast({ title: '仅支持小程序端录音', icon: 'none' })
      return
    }
    const app = Taro.getApp();
    // @ts-ignore
    if (!app?.globalData?.hasRecordPermission) {
      Taro.showToast({ title: '需要录音权限', icon: 'none' })
      return
    }
    // 初始化录音管理器（懒加载，绑定回调）
    initRecorderManager();
    try {
      // 提前将 UI 置为录音中并启动自增计时，规避极端机型 onStart 迟到导致 0s 卡住
      // NOTE(review): onStart 回调里还会再启动一个计时器；这里先 clear 旧的，
      // 但 onStart 触发时会覆盖 recordingTimerRef——与 onStart 内逻辑配合是有意为之，勿随意重排
      setIsRecording(true)
      setRecordingDuration(0)
      if (recordingTimerRef.current) clearInterval(recordingTimerRef.current)
      recordingTimerRef.current = setInterval(() => {
        setRecordingDuration((prev) => {
          const next = prev + 1
          // 60 秒上限自动停止
          if (next >= 60) {
            stopRecording()
          }
          return next
        })
      }, 1000)
      // 尝试启动录音（使用更兼容的配置）
      recorderManagerRef.current.start({
        duration: 60000,
        format: 'aac',
        audioSource: 'auto',
        sampleRate: 16000,
        numberOfChannels: 1,
        encodeBitRate: 96000
      });
    } catch (err) {
      // 启动失败：回滚 UI 状态并清理计时器
      console.error('开始录音失败:', err)
      Taro.showToast({ title: '开始录音失败', icon: 'none' })
      setIsRecording(false)
      if (recordingTimerRef.current) {
        clearInterval(recordingTimerRef.current)
        recordingTimerRef.current = null
      }
      setRecordingDuration(0)
    }
  };

  // 停止录音（松开按钮/到达上限时触发）；真正的收尾逻辑在 onStop 回调里完成
  const stopRecording = () => {
    const manager = recorderManagerRef.current;
    if (!isRecording || !manager) return;
    manager.stop();
  };


  // 发送语音消息：先上传音频文件，成功后再创建 voice 类型的聊天消息
  // @param filePath 录音产生的本地临时文件路径
  // @param duration 录音时长（秒），随消息一起落库
  const sendVoiceMessage = async (filePath: string, duration: number) => {
    try {
      const app = Taro.getApp();
      // @ts-ignore
      const currentUserId = app?.globalData?.currentUserId;
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000';

      if (!chatId) return;

      // 第一步：上传语音文件
      const uploadRes = await Taro.uploadFile({
        url: `${API_BASE}/upload`,
        filePath: filePath,
        name: 'file',
        formData: {
          type: 'voice',
          ext: 'aac'
        }
      });

      // 上传失败：原实现静默返回导致用户无感知，这里明确记录并提示
      if (uploadRes.statusCode < 200 || uploadRes.statusCode >= 300) {
        console.error('语音上传失败，状态码:', uploadRes.statusCode);
        Taro.showToast({ title: '发送失败', icon: 'none' });
        return;
      }

      // uploadFile 的响应体是字符串，需手动解析
      const uploadData = JSON.parse(uploadRes.data);

      // 第二步：创建语音消息记录
      const res = await Taro.request({
        url: `${API_BASE}/chats/${chatId}/messages`,
        method: 'POST',
        data: {
          senderId: currentUserId,
          content: `[语音消息]`,
          type: 'voice',
          voiceUrl: uploadData.url,
          voiceDuration: duration
        }
      });

      if (res.statusCode >= 200 && res.statusCode < 300) {
        // 追加到本地消息列表
        setMessages((prev) => [...prev, res.data]);
      } else {
        // 消息创建失败：同样明确提示，而非静默忽略
        console.error('语音消息创建失败，状态码:', res.statusCode);
        Taro.showToast({ title: '发送失败', icon: 'none' });
      }
    } catch (error) {
      console.error('发送语音消息失败:', error);
      Taro.showToast({
        title: '发送失败',
        icon: 'none'
      });
    }
  };

  // 上传并发送图片消息：先上传文件，成功后创建 image 类型消息（可带宽高用于气泡排版）
  // @param filePath 本地图片临时路径
  // @param width/height 可选的图片尺寸（读取失败时为 undefined）
  const uploadAndSendImage = async (filePath: string, width?: number, height?: number) => {
    try {
      const app = Taro.getApp();
      // @ts-ignore
      const currentUserId = app?.globalData?.currentUserId;
      // @ts-ignore
      const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000';
      if (!chatId) return;

      // 尝试根据路径后缀推断扩展名（推断失败留空，由服务端兜底）
      let ext = ''
      try {
        const lower = (filePath || '').toLowerCase()
        if (lower.endsWith('.png')) ext = 'png'
        else if (lower.endsWith('.jpg') || lower.endsWith('.jpeg')) ext = 'jpg'
        else if (lower.endsWith('.webp')) ext = 'webp'
      } catch (_) { }

      const uploadRes = await Taro.uploadFile({
        url: `${API_BASE}/upload`,
        filePath,
        name: 'file',
        formData: { type: 'image', ext }
      });

      // 上传失败：原实现静默返回导致用户无感知，这里明确记录并提示
      if (uploadRes.statusCode < 200 || uploadRes.statusCode >= 300) {
        console.error('图片上传失败，状态码:', uploadRes.statusCode)
        Taro.showToast({ title: '发送失败', icon: 'none' })
        return
      }

      // uploadFile 的响应体是字符串，需手动解析
      const uploadData = JSON.parse(uploadRes.data);
      const res = await Taro.request({
        url: `${API_BASE}/chats/${chatId}/messages`,
        method: 'POST',
        data: {
          senderId: currentUserId,
          type: 'image',
          imageUrl: uploadData.url,
          imageWidth: width,
          imageHeight: height
        }
      })
      if (res.statusCode >= 200 && res.statusCode < 300) {
        // 追加到本地消息列表
        setMessages((prev) => [...prev, res.data]);
      } else {
        // 消息创建失败：明确提示，而非静默忽略
        console.error('图片消息创建失败，状态码:', res.statusCode)
        Taro.showToast({ title: '发送失败', icon: 'none' })
      }
    } catch (e) {
      console.error('发送图片失败:', e)
      Taro.showToast({ title: '发送失败', icon: 'none' })
    }
  }

  // 从相册或相机选择一张压缩图片并发送（仅小程序端）
  const chooseAndSendImage = async (source: 'album' | 'camera') => {
    if (process.env.TARO_ENV !== 'weapp') {
      Taro.showToast({ title: '仅支持小程序端', icon: 'none' })
      return
    }
    try {
      const picked = await Taro.chooseImage({ count: 1, sizeType: ['compressed'], sourceType: [source] })
      const tempPath = picked.tempFilePaths?.[0]
      if (!tempPath) return
      // 尽力读取图片尺寸（用于气泡排版），失败不阻塞发送
      let imgWidth: number | undefined
      let imgHeight: number | undefined
      try {
        const info = await Taro.getImageInfo({ src: tempPath })
        imgWidth = info.width
        imgHeight = info.height
      } catch (_) { }
      await uploadAndSendImage(tempPath, imgWidth, imgHeight)
    } catch (e) {
      // 用户取消选择也会走到这里，只记录不提示
      console.error('选择图片失败:', e)
    }
  }
  
  // 图片加载失败时的本地临时路径缓存：key 为消息 id，value 为下载后的本地临时文件路径
  const imageLocalUrlMapRef = useRef<Record<string, string>>({})
  // 仅用于在缓存写入后强制一次重渲染
  const [imageRerenderTick, setImageRerenderTick] = useState(0)
  // 取消息的稳定 id：优先 _id，否则用 创建时间-发送者-下标 兜底
  const getMsgId = (m: any, i: number) => (m._id || `${m.createdAt}-${m.senderId}-${i}`)
  // 渲染图片时优先使用本地缓存路径，没有则回退远程 imageUrl
  const getImageSrc = (m: any, i: number) => {
    const id = getMsgId(m, i)
    return imageLocalUrlMapRef.current[id] || m.imageUrl
  }
  // 远程图片加载失败时：下载到本地临时路径，写入缓存后触发重渲染重试
  const handleImageError = async (m: any, i: number) => {
    try {
      const remoteUrl = m?.imageUrl
      if (!remoteUrl) return
      const id = getMsgId(m, i)
      // 已有本地缓存则不重复下载
      if (imageLocalUrlMapRef.current[id]) return
      const dl = await Taro.downloadFile({ url: remoteUrl })
      const downloadOk = dl.statusCode >= 200 && dl.statusCode < 300
      if (downloadOk && dl.tempFilePath) {
        imageLocalUrlMapRef.current[id] = dl.tempFilePath
        setImageRerenderTick((v) => v + 1)
      }
    } catch (e) {
      console.error('图片下载失败:', e)
    }
  }

  // 停止语音播放：复位全部播放相关状态，并清理进度计时器
  const stopVoicePlayback = () => {
    setPlayingVoiceId(null)
    setVoicePlayProgress(0)
    setVoicePlayCurrentTime(0)
    setVoicePlayDuration(0)
    setPlaybackError(null)

    const progressTimer = playProgressTimerRef.current
    if (progressTimer) {
      clearInterval(progressTimer)
      playProgressTimerRef.current = null
    }
  }

  // 处理语音播放错误：记录错误详情后，尝试“下载到本地再播放”的降级方案；
  // 降级仍失败则复位播放状态并提示用户
  const handlePlaybackError = async (err: any) => {
    const currentId = playingVoiceId
    if (!currentId) return

    console.error('❌ 语音播放错误:', {
      error: err,
      currentId: currentId,
      errorType: typeof err,
      errorMessage: err?.message || err?.errMsg || '未知错误'
    })

    // 尝试下载本地播放
    try {
      // 按 playVoice 中相同的 id 生成规则，在消息列表里找回出错的那条语音消息
      const currentMessage = messages.find(m =>
        (m._id || `${m.createdAt}-${m.senderId}-${messages.indexOf(m)}`) === currentId
      )

      if (currentMessage?.voiceUrl) {
        console.log('🔄 尝试下载语音文件进行本地播放:', currentMessage.voiceUrl)

        // 验证URL
        if (!currentMessage.voiceUrl.startsWith('http')) {
          console.error('❌ 语音URL格式无效:', currentMessage.voiceUrl)
          throw new Error('语音URL格式无效')
        }

        const dl = await Taro.downloadFile({ url: currentMessage.voiceUrl })
        console.log('📥 下载结果:', {
          statusCode: dl.statusCode,
          tempFilePath: dl.tempFilePath,
          success: dl.statusCode >= 200 && dl.statusCode < 300
        })

        if (dl.statusCode >= 200 && dl.statusCode < 300 && dl.tempFilePath) {
          console.log('✅ 下载成功，尝试本地播放')
          // 使用播放器管理器以本地临时路径重新播放
          await voicePlayerManager.play(currentId, dl.tempFilePath, {
            onPlay: () => {
              console.log('✅ 本地播放开始')
              setPlaybackError(null)
            },
            onEnded: () => {
              console.log('🏁 本地播放结束')
              stopVoicePlayback()
            },
            onError: (error) => {
              // 本地播放也失败：彻底放弃并提示
              console.error('❌ 本地播放也失败:', error)
              stopVoicePlayback()
              setPlaybackError('播放失败，请检查网络连接')
              Taro.showToast({
                title: '播放失败',
                icon: 'none',
                duration: 2000
              })
            },
            onTimeUpdate: (currentTime, duration) => {
              // 同步进度条/时间显示
              console.log('⏰ 本地播放时间更新:', { currentTime, duration })
              const progress = (currentTime / duration) * 100
              setVoicePlayProgress(progress)
              setVoicePlayCurrentTime(currentTime)
              setVoicePlayDuration(duration)
            }
          })
          // 降级播放已接管，提前返回，不走下方的失败收尾
          return
        } else {
          console.error('❌ 下载失败:', dl)
          throw new Error(`下载失败，状态码: ${dl.statusCode}`)
        }
      } else {
        console.error('❌ 未找到当前语音消息')
        throw new Error('未找到语音消息')
      }
    } catch (e) {
      console.error('❌ 下载语音失败:', e)
    }

    // 所有重试都失败：复位状态并提示
    console.log('❌ 所有播放尝试都失败')
    stopVoicePlayback()
    setPlaybackError('播放失败，请检查网络连接')
    Taro.showToast({
      title: '播放失败',
      icon: 'none',
      duration: 2000
    })
  }

  // 语音播放调试：收集环境信息、生成调试报告，并对指定 URL 做一次试播
  const debugVoicePlayback = async (voiceUrl: string) => {
    console.log('🔍 开始语音播放调试...')

    try {
      // 收集调试信息
      const collectedInfo = await voiceDebugger.collectDebugInfo()
      console.log('📊 调试信息收集完成:', collectedInfo)

      // 生成调试报告
      const reportText = voiceDebugger.generateDebugReport()
      console.log('📋 调试报告:', reportText)

      // 试播指定的语音 URL
      const playbackResult = await voiceDebugger.testVoicePlayback(voiceUrl)
      console.log('🧪 语音播放测试结果:', playbackResult)
    } catch (error) {
      console.error('❌ 语音播放调试失败:', error)
      Taro.showToast({
        title: `调试失败: ${error.message}`,
        icon: 'none',
        duration: 3000
      })
    }
  }

  // 微信小程序专用的语音播放测试函数：打印环境信息、校验 URL、
  // 用裸的 InnerAudioContext 直接试播（绕过 voicePlayerManager，便于定位问题层级）
  const testVoicePlay = async (voiceUrl: string) => {
    console.log('🧪 微信小程序测试语音播放:', voiceUrl)
    console.log('📱 当前环境:', process.env.TARO_ENV)
    // isDevTool 在本文件其他位置定义（此处不可见）——推测用于区分开发者工具与真机，待确认
    console.log('🔧 是否在开发者工具:', isDevTool())

    // 使用新的 API 获取系统信息（wx.getDeviceInfo 替代已废弃的 getSystemInfo）
    try {
      // @ts-ignore
      const systemInfo = typeof wx !== 'undefined' ? wx.getDeviceInfo() : null
      console.log('🌐 设备信息:', systemInfo)
    } catch (error) {
      console.log('⚠️ 获取设备信息失败:', error)
    }

    // 检查是否在微信小程序环境
    if (process.env.TARO_ENV !== 'weapp') {
      console.log('⚠️ 当前不在微信小程序环境')
      Taro.showToast({ title: '请在微信开发者工具中测试', icon: 'none' })
      return
    }

    // 如果在开发者工具中，显示提示（录音文件格式可能与真机不同）
    if (isDevTool()) {
      console.log('⚠️ 当前在开发者工具中，录音文件格式可能与移动端不同')
      Taro.showToast({
        title: '开发者工具中录音格式不同，请在真机上测试',
        icon: 'none',
        duration: 3000
      })
    }

    // 检查URL格式
    if (!voiceUrl || !voiceUrl.startsWith('http')) {
      console.error('❌ 语音URL格式无效:', voiceUrl)
      Taro.showToast({ title: '语音文件地址无效', icon: 'none' })
      return
    }

    try {
      // 先测试网络请求（HEAD 探测资源可达性；失败不终止，可能只是 CORS）
      console.log('🌐 测试语音文件网络请求...')
      try {
        const testRequest = await Taro.request({
          url: voiceUrl,
          method: 'HEAD',
          timeout: 5000
        })
        console.log('✅ 语音文件网络请求成功:', testRequest.statusCode)
      } catch (networkError) {
        console.warn('⚠️ 语音文件网络请求失败:', networkError)
        // 继续尝试播放，可能是CORS问题
      }

      const audioContext = Taro.createInnerAudioContext()
      console.log('✅ 音频上下文创建成功')

      // 配置音频播放器：不受静音开关影响，满音量
      audioContext.obeyMuteSwitch = false
      audioContext.volume = 1.0
      audioContext.src = voiceUrl

      console.log('📡 设置音频源:', voiceUrl)

      // 绑定事件
      audioContext.onPlay(() => {
        console.log('✅ 微信小程序播放成功')

      })

      audioContext.onError((err) => {
        console.error('❌ 微信小程序播放失败:', err)
        Taro.showToast({
          title: `播放失败: ${err?.errMsg || '未知错误'}`,
          icon: 'none',
          duration: 3000
        })
      })

      audioContext.onCanplay(() => {
        console.log('🎵 音频可以播放')
      })

      audioContext.onTimeUpdate(() => {
        console.log('⏰ 播放时间更新:', {
          currentTime: audioContext.currentTime,
          duration: audioContext.duration
        })
      })

      // 开始播放
      audioContext.play()
      console.log('▶️ 开始播放音频')

    } catch (error) {
      console.error('❌ 创建音频上下文失败:', error)
      Taro.showToast({
        title: `创建播放器失败: ${error.message || '未知错误'}`,
        icon: 'none',
        duration: 3000
      })
    }
  }

  // 播放语音消息：点击同一条正在播放的语音 = 停止；点击其他语音 = 先停旧的再播新的。
  // 通过 voicePlayerManager 播放并在回调里同步进度/时长/错误状态
  const playVoice = async (voiceUrl: string, messageId: string) => {
    console.log('🎵 开始播放语音:', {
      voiceUrl,
      messageId,
      platform: process.env.TARO_ENV,
      voicePlayerManager: !!voicePlayerManager
    })

    // 检查平台支持
    if (process.env.TARO_ENV === 'rn') {
      Taro.showToast({ title: '暂不支持RN端播放', icon: 'none' })
      return
    }

    // 验证语音URL
    if (!voiceUrl || !voiceUrl.startsWith('http')) {
      console.error('❌ 无效的语音URL:', voiceUrl)
      Taro.showToast({ title: '语音文件地址无效', icon: 'none' })
      return
    }

    // 如果点击的是当前播放的语音，则停止播放（点击即切换）
    if (playingVoiceId === messageId) {
      console.log('⏹️ 停止当前播放的语音')
      voicePlayerManager.stopAll()
      stopVoicePlayback()
      return
    }

    // 停止当前播放的其他语音，保证同一时刻只播一条
    if (playingVoiceId) {
      console.log('⏹️ 停止其他播放的语音')
      voicePlayerManager.stopAll()
    }

    // 先复位并设置播放状态，再真正启动播放
    console.log('🔄 设置播放状态')
    setPlayingVoiceId(messageId)
    setVoicePlayProgress(0)
    setVoicePlayCurrentTime(0)
    setVoicePlayDuration(0)
    setPlaybackError(null)

    try {
      console.log('🔧 准备使用播放器管理器播放')
      console.log('📊 播放器管理器状态:', {
        hasManager: !!voicePlayerManager,
        currentPlayer: voicePlayerManager.getCurrentPlayer()
      })

      // 使用播放器管理器播放
      await voicePlayerManager.play(messageId, voiceUrl, {
        onPlay: () => {
          console.log('✅ 语音开始播放')
          setPlaybackError(null)
        },
        onEnded: () => {
          console.log('🏁 语音播放结束')
          stopVoicePlayback()
        },
        onStop: () => {
          console.log('⏹️ 语音播放停止')
          stopVoicePlayback()
        },
        onPause: () => {
          console.log('⏸️ 语音播放暂停')
        },
        onError: async (error) => {
          // 播放失败走统一降级逻辑（下载到本地重试）
          console.error('❌ 播放语音失败:', error)
          await handlePlaybackError(error)
        },
        onCanplay: () => {
          console.log('🎵 音频加载完成')
          // 确保在音频加载完成时设置时长；某些机型首次读取 duration 为 0，需延迟重试
          const player = voicePlayerManager.getCurrentPlayer()
          if (player) {
            const state = player.getState()
            console.log('📊 播放器状态:', state)
            if (state.duration > 0) {
              setVoicePlayDuration(state.duration)
              console.log('✅ 设置语音时长:', state.duration)
            } else {
              console.log('⚠️ 音频时长未获取，尝试延迟获取')
              // 延迟再次尝试获取时长
              setTimeout(() => {
                const retryState = player.getState()
                if (retryState.duration > 0) {
                  setVoicePlayDuration(retryState.duration)
                  console.log('✅ 延迟获取语音时长成功:', retryState.duration)
                }
              }, 500)
            }
          }
        },
        onTimeUpdate: (currentTime, duration) => {
          // 同步进度条与时间显示（duration 为 0 时进度按 0 处理，避免除零）
          console.log('⏰ 时间更新事件触发:', {
            currentTime: currentTime,
            duration: duration,
            progress: duration > 0 ? (currentTime / duration) * 100 : 0,
            messageId: messageId
          })
          const progress = duration > 0 ? (currentTime / duration) * 100 : 0
          setVoicePlayProgress(progress)
          setVoicePlayCurrentTime(currentTime)
          setVoicePlayDuration(duration)
        }
      })

      console.log('✅ 开始播放语音:', voiceUrl)
    } catch (error) {
      console.error('❌ 播放语音失败:', error)
      await handlePlaybackError(error)
    }
  }

  // 暂停当前正在播放的语音（无播放中的语音时什么也不做）
  const pauseVoice = useCallback(() => {
    if (!playingVoiceId) return
    const activePlayer = voicePlayerManager.getCurrentPlayer()
    if (activePlayer) {
      activePlayer.pause()
    }
  }, [playingVoiceId])

  // 恢复被暂停的语音播放（直接 resume，无需重设 src）
  const resumeVoice = useCallback(async () => {
    if (!playingVoiceId) return
    const activePlayer = voicePlayerManager.getCurrentPlayer()
    if (!activePlayer) return
    try {
      activePlayer.resume()
    } catch (error) {
      console.error('恢复播放失败:', error)
    }
  }, [playingVoiceId])

  // 按百分比（0-100）定位播放进度；未在播放或时长未知时忽略
  const seekVoice = useCallback((progress: number) => {
    if (!playingVoiceId || !voicePlayDuration) return
    const activePlayer = voicePlayerManager.getCurrentPlayer()
    if (!activePlayer) return
    const targetSeconds = (progress / 100) * voicePlayDuration
    activePlayer.seek(targetSeconds)
  }, [playingVoiceId, voicePlayDuration])

  // 将播放控制函数挂到全局 globalData.voiceControls，供其他页面/组件调用
  useEffect(() => {
    const app = Taro.getApp()
    if (!app?.globalData) return
    app.globalData.voiceControls = {
      pauseVoice,
      resumeVoice,
      seekVoice
    }
  }, [pauseVoice, resumeVoice, seekVoice])

  // 进入语音模式：先申请录音权限（仅小程序端），拿到授权后才切换输入条为语音模式
  const onEnterVoiceMode = async () => {
    if (process.env.TARO_ENV !== 'weapp') {
      Taro.showToast({ title: '仅支持小程序端录音', icon: 'none' })
      return
    }
    const app = Taro.getApp();
    // 每次进入都统一发起权限请求，确保拿到最新授权状态
    // @ts-ignore
    const permissionGranted = !!(await app?.globalData?.requestRecordPermission?.())
    if (!permissionGranted) {
      Taro.showToast({ title: '未获得录音权限', icon: 'none' })
      return
    }
    setIsVoiceMode(true)
  }

  // 防抖的“正在输入”上报：500ms 内多次输入只向服务端发一次 chat:typing。
  // 用 useMemo 缓存防抖实例（chatId 变化时重建），避免每次渲染生成新函数触发 hooks 告警
  const debouncedInputHandler = useMemo(() => (
    debounce(() => {
      try {
        socketRef.current?.emit('chat:typing', { chatId })
      } catch (_) { }
    }, 500)
  ), [chatId]);

  // 输入框变化：更新受控输入值，并防抖上报“正在输入”状态
  const handleInputChange = (e: any) => {
    const { value } = e.detail;
    setInputValue(value);
    debouncedInputHandler();
  };

  // 发送文本消息：乐观更新本地列表 → 请求服务端 → 用真实消息替换临时消息；
  // 失败时移除临时消息并恢复输入框内容
  const handleSend = () => {
    const text = (inputValue || '').trim();
    if (!text) {
      console.log('发送消息失败：消息内容为空')
      return;
    }

    (async () => {
      // 临时消息 id 提升到 try 外层，catch 中才能引用同一个 id 做回滚。
      // （原实现在 catch 里重新生成随机 id，永远匹配不到临时消息，失败时会残留“发送中”的消息）
      const tempId = `temp_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`
      try {
        const app = Taro.getApp()
        // @ts-ignore
        const currentUserId = app?.globalData?.currentUserId
        // @ts-ignore
        const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'

        console.log('📤 发送消息调试信息:', {
          currentUserId,
          chatId,
          API_BASE,
          text,
          socketConnected: socketRef.current?.connected
        })

        if (!currentUserId) {
          console.error('❌ 发送消息失败：用户未登录')
          Taro.showToast({ title: '请先登录', icon: 'none' })
          return
        }

        if (!chatId) {
          console.error('❌ 发送消息失败：chatId不存在')
          Taro.showToast({ title: '聊天会话不存在', icon: 'none' })
          return
        }

        // 乐观更新：先添加到本地消息列表，立即给用户反馈
        const tempMessage = {
          _id: tempId,
          senderId: currentUserId,
          content: text,
          type: 'text',
          createdAt: new Date(),
          readBy: [currentUserId],
          isSending: true
        }

        console.log('📤 添加临时消息到本地列表')
        setMessages((prev) => [...prev, tempMessage])
        setInputValue('')

        const res = await Taro.request({
          url: `${API_BASE}/chats/${chatId}/messages`,
          method: 'POST',
          data: { senderId: currentUserId, content: text },
          timeout: 10000
        })

        console.log('📤 发送消息响应:', res)

        if ((res.statusCode as number) >= 200 && (res.statusCode as number) < 300) {
          const sentMessage = res.data
          console.log('✅ 消息发送成功，服务器返回:', sentMessage)

          // 移除临时消息，追加服务端返回的真实消息
          setMessages((prev) => {
            const filtered = prev.filter(m => m._id !== tempId)
            return [...filtered, { ...sentMessage, isSending: false }]
          })

          // 确保Socket连接正常（断开时尝试重连，保证后续实时推送）
          if (socketRef.current && !socketRef.current.connected) {
            console.warn('⚠️ Socket未连接，尝试重新连接')
            socketRef.current.connect()
          }

        } else {
          console.error('❌ 发送消息失败：服务器返回错误状态码', res.statusCode)
          // 移除临时消息
          setMessages((prev) => prev.filter(m => m._id !== tempId))
          // 恢复输入框内容
          setInputValue(text)
          Taro.showToast({ title: '发送失败', icon: 'none' })
        }
      } catch (error) {
        console.error('❌ 发送消息异常:', error)
        // 按提升出的 tempId 精确移除临时消息（修复原实现无法回滚的缺陷）
        setMessages((prev) => prev.filter(m => m._id !== tempId))
        // 恢复输入框内容
        setInputValue(text)
        Taro.showToast({ title: '发送失败，请检查网络', icon: 'none' })
      }
    })()
  };

  return (
    <View className='chatBox'>
      <View className='contentBox'>

        {/* Message list (chat-bubble style) */}
        {/* NOTE(review): scrollTop is a constant 0, which pins the view to the
            top on every render; chat UIs usually follow the newest message —
            confirm this is intentional. */}
        <ScrollView 
          scrollY 
          className='messages'
          onScrollToUpper={loadMoreMessages}
          scrollTop={0}
          enhanced
          showScrollbar={false}
          refresherEnabled
          refresherTriggered={refresherTriggered}
          onRefresherRefresh={onRefresherRefresh}
          onRefresherPulling={onRefresherPulling}
          onRefresherRestore={onRefresherRestore}
          refresherBackground='#f8f9fa'
        >
          {/* Refresh status indicator: spinner text while pull-to-refresh or the
              auto-refresh timer is running; otherwise last-updated time plus a
              manual refresh button. */}
          <View className='refresh-status-container'>
            {(isAutoRefreshing || isRefreshing) && (
              <View className='refresh-indicator'>
                <Text className='refresh-text'>
                  {isRefreshing ? '🔄 下拉刷新中...' : '🔄 正在自动刷新消息...'}
                </Text>
              </View>
            )}
            {lastRefreshTime > 0 && !isAutoRefreshing && !isRefreshing && (
              <View className='last-refresh-time'>
                <Text className='refresh-time-text'>
                  最后更新: {new Date(lastRefreshTime).toLocaleTimeString()}
                </Text>
                <View 
                  className='manual-refresh-btn'
                  onClick={() => {
                    console.log('🔄 手动触发刷新')
                    refreshMessages()
                  }}
                >
                  <Text className='refresh-btn-text'>刷新</Text>
                </View>
              </View>
            )}
          </View>

          {/* "Load more" hint — only rendered while older messages are loading */}
          {isLoadingMore && (
            <View className='load-more-container'>
              <View className='loading-indicator'>
                <Text className='loading-text'>加载中...</Text>
              </View>
            </View>
          )}
          
          {/* NOTE(review): key={i} (array index) breaks React reconciliation
              when older messages are prepended via loadMoreMessages — prefer a
              stable key such as m._id. */}
          {messages.map((m, i) => {
            // @ts-ignore — globalData is untyped on Taro's app instance
            const currentUserId = Taro.getApp()?.globalData?.currentUserId
            const isMe = String(m.senderId) === String(currentUserId)
            const sender = userMap[String(m.senderId)]
            // Resolve the sender's display name and avatar, with fallbacks.
            const senderName = sender?.nickname || sender?.username || sender?.name || sender?.realName || sender?.displayName || `用户${String(m.senderId).slice(-4)}`
            const senderAvatar = sender?.avatar || (isMe ? currentUser?.avatar : peerUser?.avatar) || 'https://picsum.photos/80'
            return (
              <View key={i} className='msg-item'>
                {/* Sender name above the bubble — shown for every message */}
                <View className={`sender-name-container ${isMe ? 'me' : 'other'}`}>
                  <Text className='sender-name-text'>{senderName}</Text>
                </View>
                <View className={`msg-row ${isMe ? 'me' : 'other'}`}>
                  {!isMe && (
                    <View className='avatar'>
                      <Image src={senderAvatar} mode='aspectFill' />
                    </View>
                  )}
                  <View className={`msg-bubble ${isMe ? 'me' : 'other'}`}>
                    {m.type === 'voice' ? (
                      <View className='voice-message-container'>
                        <View
                          className={`voice-message ${playingVoiceId === (m._id || `${m.createdAt}-${m.senderId}-${i}`) ? 'playing' : ''}`}
                          onClick={() => {
                            console.log('🖱️ 语音消息被点击:', {
                              voiceUrl: m.voiceUrl,
                              messageId: m._id || `${m.createdAt}-${m.senderId}-${i}`,
                              messageType: m.type,
                              fullMessage: m
                            })

                            // Guard: a voice message without a URL cannot be played.
                            if (!m.voiceUrl) {
                              console.error('❌ 语音URL为空')
                              Taro.showToast({ title: '语音文件不存在', icon: 'none' })
                              return
                            }

                            // Build an absolute URL (the server may return a relative path).
                            const app = Taro.getApp()
                            const API_BASE = app?.globalData?.apiBase || Taro.getStorageSync('API_BASE') || 'http://localhost:3000'
                            const fullVoiceUrl = m.voiceUrl.startsWith('http') ? m.voiceUrl : `${API_BASE}${m.voiceUrl}`

                            console.log('🔗 完整语音URL:', fullVoiceUrl)

                            // NOTE(review): only the debug helpers run here —
                            // the real player (playVoice) is commented out
                            // below, so tapping a voice bubble never starts
                            // normal playback. Confirm this debug-only path
                            // is not meant to ship.
                            debugVoicePlayback(fullVoiceUrl)
                            testVoicePlay(fullVoiceUrl)
                            // playVoice(fullVoiceUrl, (m._id || `${m.createdAt}-${m.senderId}-${i}`))
                          }}
                        >
                          <View className='voice-icon'>
                            <Text className={`iconfont ${playingVoiceId === (m._id || `${m.createdAt}-${m.senderId}-${i}`) ? 'icon-pause' : 'icon-yuyin'}`}></Text>
                          </View>
                          <View className='voice-duration'>
                            <Text>
                              {playingVoiceId === (m._id || `${m.createdAt}-${m.senderId}-${i}`)
                                ? formatTime(voicePlayCurrentTime)
                                : formatDuration(m.voiceDuration || 0)
                              }
                            </Text>
                          </View>
                          <View className='voice-wave'>
                            {Array.from({ length: 5 }).map((_, index) => (
                              <View
                                key={index}
                                className={`wave-bar ${playingVoiceId === (m._id || `${m.createdAt}-${m.senderId}-${i}`) ? 'active' : ''}`}
                                style={{
                                  animationDelay: `${index * 0.1}s`
                                }}
                              ></View>
                            ))}
                          </View>
                        </View>

                        {/* Playback progress bar — only for the message currently playing */}
                        {playingVoiceId === (m._id || `${m.createdAt}-${m.senderId}-${i}`) && (
                          <View className='voice-progress-container'>
                            <View className='voice-progress-bar'>
                              <View
                                className='voice-progress-fill'
                                style={{ width: `${voicePlayProgress}%` }}
                              ></View>
                            </View>
                            <View className='voice-time-display'>
                              <Text className='current-time'>{formatTime(voicePlayCurrentTime)}</Text>
                              <Text className='total-time'>{formatTime(voicePlayDuration)}</Text>
                            </View>
                          </View>
                        )}


                        {/* Playback error hint with a retry button */}
                        {playbackError && playingVoiceId === (m._id || `${m.createdAt}-${m.senderId}-${i}`) && (
                          <View className='voice-error'>
                            <Text className='error-text'>{playbackError}</Text>
                            <View
                              className='retry-btn'
                              onClick={(e) => {
                                e.stopPropagation()
                                playVoice(m.voiceUrl, (m._id || `${m.createdAt}-${m.senderId}-${i}`))
                              }}
                            >
                              <Text className='retry-text'>重试</Text>
                            </View>
                          </View>
                        )}
                      </View>
                    ) : m.type === 'image' ? (
                      // NOTE(review): previewImage receives the raw m.imageUrl
                      // while the <Image> src goes through getImageSrc — a
                      // relative imageUrl may fail to preview; consider reusing
                      // the resolved URL.
                      <Image
                        key={`img-${i}-${imageRerenderTick}`}
                        src={getImageSrc(m, i)}
                        mode='aspectFill'
                        style={{ maxWidth: '220px', maxHeight: '220px', borderRadius: '6px' }}
                        onError={() => handleImageError(m, i)}
                        onClick={() => {
                          if (m.imageUrl) {
                            Taro.previewImage({ current: m.imageUrl, urls: [m.imageUrl] })
                          }
                        }}
                      />
                    ) : (
                      <Text>{m.content}</Text>
                    )}
                    {isMe && (
                      <View className='status-row'>
                        {m.isSending ? (
                          <Text className='status sending'>发送中...</Text>
                        ) : Array.isArray(m.readBy) && peerId && m.readBy.some((x: any) => String(x) === String(peerId)) ? (
                          <>
                            {/* <EyeOutlined size={14} /> */}
                            <Text className='status'>已读</Text>
                          </>
                        ) : (
                          <Text className='status'>已送达</Text>
                        )}
                      </View>
                    )}
                  </View>
                  {isMe && (
                    <View className='avatar'>
                      <Image src={senderAvatar} mode='aspectFill' />
                    </View>
                  )}
                </View>
                {/* Timestamp under the bubble, centered */}
                <View className='msg-time'>
                  <Text>{formatChatTime(m.createdAt)}</Text>
                </View>
              </View>
            )
          })}
        </ScrollView>
      </View>

      {/* Custom-built input bar */}
      <View className='mybox'>
        <View className='mybox-inner'>
          {/* Left button opens the camera directly and sends the shot */}
          <Button className='mybtn' onClick={() => chooseAndSendImage('camera')}>
            <Photograph />
          </Button>
          {isVoiceMode ? (
            <View className='voice-input-container'>
              {/* Hold-to-talk: recording starts on touch and stops on release/cancel */}
              <View
                className={`voice-btn ${isRecording ? 'recording' : ''}`}
                onTouchStart={startRecording}
                onTouchEnd={stopRecording}
                onTouchCancel={stopRecording}
              >
                <Text className='voice-text'>
                  {isRecording ? `录音中 ${recordingDuration}s` : '按住 说话'}
                </Text>
              </View>
            </View>
          ) : (
            <Input
              ref={inputRef}
              type='text'
              value={inputValue}
              onInput={handleInputChange}
              placeholder='发送信息'
              className='myinput'
            />
          )}
          {isVoiceMode ? (
            <View className='mybox-right2'>
              {/* Switch back to keyboard input */}
              <View onClick={() => setIsVoiceMode(false)}>
                <Text className='iconfont icon-anjian' />
              </View>
            </View>
          ) : (
            <View className='mybox-right'>
              {/* NOTE(review): when the input has text, the icon class collapses
                  to an empty string — the voice toggle becomes invisible but is
                  still clickable; confirm intended. */}
              <View onClick={onEnterVoiceMode}>
                <Text className={`iconfont ${(inputValue || '').trim() ? '' : 'icon-yuyin'}`} />
              </View>
              <View onClick={toggleEmojiPanel}>
                <Text className={`iconfont ${showEmojiPanel ? 'icon-anjian' : 'icon-xiaolian'}`} />
              </View>
              {/* Same control doubles as "send" (when there is text) and as the
                  extra-functions toggle (when empty). */}
              <View className={`${(inputValue || '').trim() ? 'feiji' : ''}`}>
                <Text className={`iconfont ${(inputValue || '').trim() ? 'icon-fasong' : (showExtraFunction ? 'icon-chahao' : 'icon-jiahao')}`} onClick={(inputValue || '').trim() ? handleSend : toggleExtraFunction} />
              </View>
            </View>
          )}
        </View>

        {/* Extra functions panel (album / camera / video call / voice call) */}
        {showExtraFunction && (
          <View className='extra-function'>
            <View className='extra-item' onClick={() => chooseAndSendImage('album')}>
              <Text className='iconfont icon-xiangce' />
              <Text>相册</Text>
            </View>
            <View className='extra-item' onClick={() => chooseAndSendImage('camera')}>
              <Text className='iconfont icon-a-111_paishe' />
              <Text>拍摄</Text>
            </View>
            <View className='extra-item' onClick={startVideoCall}>
              <Text className='iconfont icon-shipintonghua' />
              <Text>视频通话</Text>
            </View>
            <View 
              className='extra-item' 
              onClick={() => {
                // Show a warning inside the devtools simulator (where real calls
                // are limited) …
                if (isDevTool()) {
                  showDevToolWarning('语音通话')
                  // … but still proceed with the voice call.
                }
                startVoiceCall()
              }}
            >
              <Phone className='yuyin' />
              <Text>语音通话</Text> 
            </View>
          </View>
        )}

        {/* Emoji panel */}
        {showEmojiPanel && (
          <View className='emoji-panel'>
            <ScrollView className='emoji-scroll' scrollY>
              {/* Recently used emojis */}
              {recentEmojis.length > 0 && (
                <View className='emoji-section'>
                  <Text className='emoji-title'>最近使用</Text>
                  <View className='emoji-grid'>
                    {recentEmojis.map((emoji, index) => (
                      <View
                        key={`r-${index}`}
                        className='emoji-item'
                        onClick={() => insertEmoji(emoji)}
                      >
                        {emoji}
                      </View>
                    ))}
                  </View>
                </View>
              )}

              {/* Full emoji set */}
              <View className='emoji-section'>
                <Text className='emoji-title'>所有表情</Text>
                <View className='emoji-grid'>
                  {allEmojis.map((emoji, index) => (
                    <View
                      key={`a-${index}`}
                      className='emoji-item'
                      onClick={() => insertEmoji(emoji)}
                    >
                      {emoji}
                    </View>
                  ))}
                </View>
              </View>
            </ScrollView>
          </View>
        )}

      </View>

      {/* Active video-call overlay */}
      {isVideoCallActive && (
        <View className='video-call-overlay'>
          <View className='video-call-container'>
            <View className='video-call-header'>
              <Text className='video-call-title'>视频通话中 - {peerUser?.nickname || peerUser?.username || '医生'}</Text>
              <Button className='video-call-end-btn' onClick={endVideoCall}>结束通话</Button>
            </View>
            <View className='video-call-content'>
              <View className='video-wrapper'>
                {process.env.TARO_ENV === 'weapp' ? (
                  // WeChat mini-program: native live-player / live-pusher components
                  <>
                    <live-player
                      id='remoteVideo'
                      className='remote-video'
                      src=''
                      mode='live'
                      autoplay
                      muted={false}
                      orientation='vertical'
                      object-fit='cover'
                    />
                    <live-pusher
                      id='localVideo'
                      className='local-video'
                      url=''
                      mode='SD'
                      autopush
                      muted={false}
                      enable-camera
                      auto-focus
                      orientation='vertical'
                      beauty={1}
                      whiteness={1}
                      aspect='9:16'
                    />
                  </>
                ) : (
                  // H5: plain <video> elements driven by WebRTC streams
                  <>
                    <video ref={remoteVideoRef} autoPlay playsInline className='remote-video' />
                    <video ref={localVideoRef} autoPlay playsInline muted className='local-video' />
                  </>
                )}
              </View>
              <View className='video-call-controls'>
                <View 
                  className='control-btn' 
                  onClick={() => {
                    // Camera button.
                    if (process.env.TARO_ENV === 'weapp') {
                      // Mini-program: flip between front/back camera.
                      const livePusherContext = Taro.createLivePusherContext()
                      livePusherContext.switchCamera()
                    } else if (localStreamRef.current) {
                      // NOTE(review): the H5 branch toggles the video track
                      // on/off rather than switching cameras — inconsistent
                      // with the weapp branch; confirm which is intended.
                      const videoTrack = localStreamRef.current.getVideoTracks()[0]
                      if (videoTrack) {
                        videoTrack.enabled = !videoTrack.enabled
                      }
                    }
                  }}
                >
                  <Text className='iconfont icon-shexiangtou' />
                </View>
                <View 
                  className='control-btn' 
                  onClick={() => {
                    // Microphone button.
                    if (process.env.TARO_ENV === 'weapp') {
                      // NOTE(review): LivePusherContext.toggleTorch() toggles
                      // the FLASHLIGHT, not the microphone — almost certainly
                      // the wrong API here; mic muting is controlled via the
                      // live-pusher `muted` attribute. Needs fixing.
                      const livePusherContext = Taro.createLivePusherContext()
                      livePusherContext.toggleTorch()
                    } else if (localStreamRef.current) {
                      const audioTrack = localStreamRef.current.getAudioTracks()[0]
                      if (audioTrack) {
                        audioTrack.enabled = !audioTrack.enabled
                      }
                    }
                  }}
                >
                  <Text className='iconfont icon-yuyin' />
                </View>
                <View className='control-btn end-call' onClick={endVideoCall}>
                  <Text className='iconfont icon-guanbi' />
                </View>
              </View>
            </View>
          </View>
        </View>
      )}

      {/* Incoming video-call invitation dialog */}
      {isVideoCallIncoming && incomingCallInfo && (
        <View className='video-call-incoming-overlay'>
          <View className='video-call-incoming-container'>
            <View className='incoming-call-avatar'>
              <Image
                src={peerUser?.avatar || 'https://picsum.photos/80'}
                mode='aspectFill'
                className='avatar-img'
              />
            </View>
            <Text className='incoming-call-title'>视频通话邀请</Text>
            <Text className='incoming-call-subtitle'>
              {incomingCallInfo.fromName || '对方'} 邀请您进行视频通话
            </Text>
            <View className='incoming-call-actions'>
              <Button
                className='incoming-call-btn reject'
                onClick={() => rejectVideoCall(incomingCallInfo)}
              >
                拒绝
              </Button>
              <Button
                className='incoming-call-btn accept'
                onClick={() => answerVideoCall(incomingCallInfo)}
              >
                接听
              </Button>
            </View>
          </View>
        </View>
      )}

      {/* Incoming voice-call invitation dialog */}
      {isVoiceCallIncoming && incomingVoiceInfo && (
        <View className='video-call-incoming-overlay'>
          <View className='video-call-incoming-container'>
            <View className='incoming-call-avatar'>
              <Image
                src={peerUser?.avatar || 'https://picsum.photos/80'}
                mode='aspectFill'
                className='avatar-img'
              />
            </View>
            <Text className='incoming-call-title'>语音通话邀请</Text>
            <Text className='incoming-call-subtitle'>
              {incomingVoiceInfo.fromName || '对方'} 邀请您进行语音通话
            </Text>
            <View className='incoming-call-actions'>
              <Button
                className='incoming-call-btn reject'
                onClick={() => rejectVoiceCall(incomingVoiceInfo)}
              >
                拒绝
              </Button>
              <Button
                className='incoming-call-btn accept'
                onClick={() => answerVoiceCall(incomingVoiceInfo)}
              >
                接听
              </Button>
            </View>
          </View>
        </View>
      )}

      {/* Active voice-call overlay (reuses the video-call styling) */}
      {isVoiceCallActive && (
        <View className='video-call-overlay'>
          <View className='video-call-container'>
            <View className='video-call-header'>
              <Text className='video-call-title'>语音通话中 - {peerUser?.nickname || peerUser?.username || '医生'}</Text>
              <Button className='video-call-end-btn' onClick={endVoiceCall}>结束通话</Button>
            </View>
            <View className='video-call-content'>
              <View className='video-wrapper' style={{ alignItems: 'center', justifyContent: 'center', color: '#fff' }}>
                {process.env.TARO_ENV === 'weapp' ? (
                  // Mini-program: audio-only live-pusher (camera disabled, mic enabled)
                  <View className='voice-call-audio-container'>
                    <live-pusher
                      id='voicePusher'
                      className='voice-pusher'
                      url=''
                      mode='SD'
                      autopush
                      muted={false}
                      enable-camera={false}
                      enable-mic
                      orientation='vertical'
                      beauty={0}
                      whiteness={0}
                      aspect='9:16'
                    />
                    <View className='voice-call-status'>
                      <Text className='voice-call-icon'>🎤</Text>
                      <Text className='voice-call-text'>语音通话已连接</Text>
                      <Text className='voice-call-tip'>请确保麦克风权限已开启</Text>
                    </View>
                  </View>
                ) : (
                  // H5: just a simple connected-status panel
                  <View className='voice-call-status'>
                    <Text className='voice-call-icon'>🎤</Text>
                    <Text className='voice-call-text'>语音通话已连接</Text>
                    <Text className='voice-call-tip'>请确保麦克风权限已开启</Text>
                  </View>
                )}
              </View>
            </View>
          </View>
        </View>
      )}

    </View>
  )
}


