<template>
  <div class="media-chat">
    <!-- Video container: local preview plus remote playback -->
    <div class="video-container">
      <div class="local-video-wrapper">
        <video ref="localVideo" autoplay muted playsinline class="video-element">您的浏览器不支持视频标签</video>
        <div class="video-label">本地视频</div>
      </div>
      <div class="remote-video-wrapper">
        <!-- Remote video is fed by MediaSource; the no-signal overlay is drawn via CSS ::after -->
        <video
          ref="remoteVideo"
          autoplay
          playsinline
          class="video-element"
          :class="{ 'no-signal': !remoteCameraActive }"
        >
          等待对方视频信号...
        </video>
        <div class="video-label">远程视频</div>
      </div>
    </div>

    <!-- Call control buttons (disabled until a connection exists) -->
    <div class="controls">
      <button class="voice-button" @click="toggleMicrophone" :disabled="!isConnected">
        <span v-if="isMicrophoneOn">关闭麦克风</span>
        <span v-else>开启麦克风</span>
      </button>
      <button class="video-button" @click="toggleCamera" :disabled="!isConnected || !isVideoSupported">
        <span v-if="isCameraOn">关闭摄像头</span>
        <span v-else>开启摄像头</span>
        <span v-if="!isVideoSupported" class="unsupported-indicator">不支持</span>
      </button>
      <button class="hangup-button" @click="endMediaCall" :disabled="!isConnected">结束通话</button>
    </div>

    <!-- Status indicators: mic / camera activity dots and connection text -->
    <div class="status-bar">
      <div class="status-indicator voice" :class="{ active: isMicrophoneOn }"></div>
      <div class="status-indicator video" :class="{ active: isCameraOn }"></div>
      <div class="connection-status" :class="{ connected: isConnected, error: hasError }">
        {{ connectionStatus }}
      </div>
    </div>
  </div>
</template>

<script setup>
import { ref, onMounted, onBeforeUnmount, watch, defineEmits, defineExpose } from 'vue'

// --- Reactive state -------------------------------------------------------
const isMicrophoneOn = ref(false)
const isCameraOn = ref(false)
const mediaStream = ref(null) // single MediaStream shared by audio and video capture
const isConnected = ref(false)
const connectionStatus = ref('未连接')
const currentTarget = ref(null)
const audioContext = ref(null)
const audioSource = ref(null)
const workletNode = ref(null)
const localVideo = ref(null)
const remoteVideo = ref(null)
const hasError = ref(false)
const isVideoSupported = ref(true)
const remoteCameraActive = ref(false) // tracks the remote peer's camera state

// --- Non-reactive video playback/recording objects ------------------------
let mediaRecorder = null
let mediaSource = null
let sourceBuffer = null
let videoChunkQueue = []
let isProcessingQueue = false
let mediaSourceUrl = null
let videoProcessingTimeout = null

// Media configuration (combined audio and video constraints)
const MEDIA_CONFIG = {
  // Audio capture/encoding settings
  AUDIO: {
    SAMPLE_RATE: 16000,
    CHANNELS: 1,
    CHUNK_SIZE: 1024,
    WORKLET_NAME: 'pcm-processor',
    CONSTRAINTS: {
      sampleRate: { ideal: 16000 },
      channelCount: 1,
      echoCancellation: true,
      noiseSuppression: true,
      autoGainControl: true
    }
  },
  // Video capture/encoding settings (MIME_TYPE may be mutated to an
  // mp4 fallback by checkVideoSupport at runtime)
  VIDEO: {
    WIDTH: 480,
    HEIGHT: 320,
    FRAME_RATE: 15,
    MIME_TYPE: 'video/webm; codecs=vp8',
    CONSTRAINTS: {
      width: { ideal: 480, max: 640 },
      height: { ideal: 320, max: 480 },
      frameRate: { ideal: 15, max: 20 },
      facingMode: 'user'
    },
    TIME_SLICE: 100,
    MAX_QUEUE_SIZE: 2,
    BITRATE: 300000
  }
}
// Keyframe-detection state (NOTE(review): these three appear unused in the
// visible code — candidates for removal once confirmed)
const keyframeBuffer = ref([])
const keyframesReceived = ref(0)
const isFirstRender = ref(true)

// Inline AudioWorklet processor source. The string is compiled into a blob
// module by initAudioContext; it converts Float32 input to Int16 PCM,
// drops sustained silence, and posts fixed-size chunks to the main thread.
const WORKLET_CODE = `
class PcmProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.chunkSize = ${MEDIA_CONFIG.AUDIO.CHUNK_SIZE};
    this.pcmBuffer = new Int16Array();
    this.silenceThreshold = 0.003;
    this.silenceFrames = 0;
    this.silenceFrameLimit = 10;
  }

  process(inputs, outputs, parameters) {
    const input = inputs[0];
    if (input.length === 0) return true;

    const inputData = input[0];
    const isSilence = this.isSilenceFrame(inputData);
    if (isSilence) {
      this.silenceFrames++;
      if (this.silenceFrames > this.silenceFrameLimit) {
        return true;
      }
    } else {
      this.silenceFrames = 0;
    }

    const pcmData = this.float32ToInt16(inputData);
    this.pcmBuffer = this.concatBuffers(this.pcmBuffer, pcmData);

    while (this.pcmBuffer.length >= this.chunkSize) {
      const chunk = new Int16Array(this.pcmBuffer.subarray(0, this.chunkSize));
      this.port.postMessage(chunk.buffer); 
      this.pcmBuffer = this.pcmBuffer.subarray(this.chunkSize);
    }

    return true;
  }

  isSilenceFrame(buffer) {
    let sum = 0;
    for (let i = 0; i < buffer.length; i++) {
      sum += Math.abs(buffer[i]);
    }
    return sum / buffer.length < this.silenceThreshold;
  }

  float32ToInt16(buffer) {
    const len = buffer.length;
    const result = new Int16Array(len);
    for (let i = 0; i < len; i++) {
      const val = Math.max(-1, Math.min(1, buffer[i]));
      result[i] = val < 0 ? Math.floor(val * 0x8000) : Math.floor(val * 0x7FFF);
    }
    return result;
  }

  concatBuffers(a, b) {
    const tmp = new Int16Array(a.length + b.length);
    tmp.set(a, 0);
    tmp.set(b, a.length);
    return tmp;
  }
}

registerProcessor('${MEDIA_CONFIG.AUDIO.WORKLET_NAME}', PcmProcessor);
`

// Playback queue for received PCM audio; playback starts only once
// MIN_QUEUE_LENGTH chunks are buffered (simple jitter buffer).
const audioBufferQueue = ref([])
const isPlayingAudio = ref(false)
const MIN_QUEUE_LENGTH = 3

// Component setup: prepare the audio pipeline, probe video support, and
// wire the remote <video> element to a fresh MediaSource.
onMounted(async () => {
  await initAudioContext()
  checkVideoSupport()
  initRemoteVideo()
})

// Create the AudioContext and load the inline PCM worklet module.
// Order matters: the context must exist before addModule, and the worklet
// URL is revoked only after the module has been loaded.
const initAudioContext = async () => {
  if (!window.AudioContext && !window.webkitAudioContext) {
    console.error('浏览器不支持 Web Audio API')
    connectionStatus.value = '您的浏览器不支持语音功能'
    return
  }

  audioContext.value = new (window.AudioContext || window.webkitAudioContext)({
    sampleRate: MEDIA_CONFIG.AUDIO.SAMPLE_RATE,
    latencyHint: 'interactive'
  })

  // Work around autoplay restrictions (notably iOS Safari): resume the
  // context on the first user click. { once: true } removes the listener.
  document.addEventListener(
    'click',
    async () => {
      if (audioContext.value && audioContext.value.state === 'suspended') {
        await audioContext.value.resume()
      }
    },
    { once: true }
  )

  try {
    // Compile the worklet source from a blob URL so no separate file is needed.
    const blob = new Blob([WORKLET_CODE], { type: 'application/javascript' })
    const workletUrl = URL.createObjectURL(blob)
    await audioContext.value.audioWorklet.addModule(workletUrl)
    URL.revokeObjectURL(workletUrl)
    console.log('AudioWorklet 加载成功')
  } catch (error) {
    console.error('加载音频工作let失败:', error)
    connectionStatus.value = '音频处理初始化失败'
  }
}

// Probe browser support for the recording/playback APIs and negotiate a
// usable MIME type (falls back from WebM/VP8 to MP4/H.264, mutating
// MEDIA_CONFIG.VIDEO.MIME_TYPE). Disables video features when neither works.
const checkVideoSupport = () => {
  if (!window.MediaRecorder || !window.MediaSource) {
    isVideoSupported.value = false
    connectionStatus.value = '您的浏览器不支持视频通话功能'
    console.warn('浏览器不支持MediaRecorder或MediaSource API')
    return
  }

  // Preferred codec is available — nothing more to do.
  if (MediaRecorder.isTypeSupported(MEDIA_CONFIG.VIDEO.MIME_TYPE)) {
    return
  }

  const alternativeMime = 'video/mp4; codecs=avc1.42E01E'
  if (MediaRecorder.isTypeSupported(alternativeMime)) {
    MEDIA_CONFIG.VIDEO.MIME_TYPE = alternativeMime
  } else {
    isVideoSupported.value = false
    connectionStatus.value = '您的浏览器不支持所需的视频格式'
  }
}
// Attach a SourceBuffer to the (already open) MediaSource and hook its
// 'updateend' event to the queue pump. Flags an error state on failure.
const initVido = () => {
  try {
    const buffer = mediaSource.addSourceBuffer(MEDIA_CONFIG.VIDEO.MIME_TYPE)
    buffer.addEventListener('updateend', processVideoQueue)
    sourceBuffer = buffer
    console.log('SourceBuffer created')
  } catch (error) {
    console.error('创建SourceBuffer失败:', error)
    connectionStatus.value = '视频播放初始化失败'
    hasError.value = true
  }
}

// Drop the SourceBuffer reference (called when the MediaSource closes).
const clearVido = () => {
  sourceBuffer = null
}

// Initialize the remote <video> element with a fresh MediaSource.
// Safe to call repeatedly (handleStatusData re-invokes it each time the
// remote camera turns on): any previous blob URL is revoked first so
// repeated sessions no longer leak object URLs.
const initRemoteVideo = () => {
  // Release a stale object URL left over from a previous session.
  if (mediaSourceUrl) {
    URL.revokeObjectURL(mediaSourceUrl)
    mediaSourceUrl = null
  }

  mediaSource = new MediaSource()
  mediaSource.addEventListener('sourceclose', () => {
    console.log('MediaSource closed')
    clearVido()
  })

  // The SourceBuffer can only be created once the source is open.
  mediaSource.addEventListener('sourceopen', () => {
    console.log('MediaSource opened')
    initVido()
  })
  mediaSourceUrl = URL.createObjectURL(mediaSource)

  // Guard the ref: it is null before mount / after unmount.
  if (remoteVideo.value) {
    remoteVideo.value.src = mediaSourceUrl
  }
}

// Tear down all remote-video playback resources: pending timers, the
// MediaSource, its SourceBuffer listener, the element's blob URL, and the
// chunk queue. Order matters: endOfStream must run before the source is
// dropped, and the URL is revoked only after the element stops using it.
const cleanupVideoResources = () => {
  if (videoProcessingTimeout) {
    clearTimeout(videoProcessingTimeout)
    videoProcessingTimeout = null
  }

  if (mediaSource) {
    if (mediaSource.readyState !== 'closed') {
      try {
        // endOfStream can throw if the SourceBuffer is mid-update; best effort.
        mediaSource.endOfStream()
      } catch (e) {
        console.warn('结束MediaStream失败:', e)
      }
    }
    mediaSource = null
  }

  if (sourceBuffer) {
    // Detach the queue pump so a stale buffer can't re-trigger it.
    sourceBuffer.removeEventListener('updateend', processVideoQueue)
    sourceBuffer = null
  }

  if (remoteVideo.value && mediaSourceUrl) {
    remoteVideo.value.src = ''
    URL.revokeObjectURL(mediaSourceUrl)
    mediaSourceUrl = null
  }

  videoChunkQueue = []
  isProcessingQueue = false
}

// Pump queued video chunks into the SourceBuffer, one appendBuffer per
// 'updateend' cycle. Fixes two defects in the original: the synchronous
// recursive call after appendBuffer was dead code (updating is true at
// that point, so the recursion always bailed out), and an appendBuffer
// failure left isProcessingQueue wedged at true.
const processVideoQueue = () => {
  // Nothing to do, or playback already torn down: mark the pump idle.
  if (!sourceBuffer || !mediaSource || mediaSource.readyState !== 'open' || videoChunkQueue.length === 0) {
    isProcessingQueue = false
    return
  }
  // appendBuffer is asynchronous; the 'updateend' listener re-enters us.
  if (sourceBuffer.updating) {
    return
  }

  isProcessingQueue = true
  const chunk = videoChunkQueue.shift()

  try {
    sourceBuffer.appendBuffer(chunk)
    // No recursion here: updating is now true, so the next iteration is
    // driven by the 'updateend' event registered in initVido.
  } catch (e) {
    console.error('追加视频数据失败:', e)
    isProcessingQueue = false // don't leave the pump wedged on failure
  }
}

// Toggle the microphone by enabling/disabling the audio track on the
// shared media stream, acquiring a new stream on first use.
// Emits an 'audioStatus' control message to the peer either way.
const toggleMicrophone = async () => {
  if (isMicrophoneOn.value) {
    // Turning OFF: disable the audio track (the stream itself is kept alive)
    if (mediaStream.value) {
      const audioTrack = mediaStream.value.getAudioTracks()[0]
      if (audioTrack) audioTrack.enabled = false
    }
    isMicrophoneOn.value = false
    updateConnectionStatus()
    emit('sendMediaData', {
      type: 'audioStatus',
      data: { targetId: currentTarget.value, status: false }
    })
    return
  }

  if (!currentTarget.value) {
    connectionStatus.value = '请先建立连接'
    return
  }

  try {
    hasError.value = false
    if (audioContext.value && audioContext.value.state === 'suspended') {
      await audioContext.value.resume()
    }

    // First activation, or the current stream has no audio track
    if (!mediaStream.value || mediaStream.value.getAudioTracks().length === 0) {
      // Request a combined stream; include video only if the camera is on.
      // NOTE(review): if a video-only stream already exists it is replaced
      // wholesale and its old tracks are never stop()ped — confirm this
      // doesn't keep the previous camera capture alive.
      const constraints = {
        audio: MEDIA_CONFIG.AUDIO.CONSTRAINTS,
        video: isCameraOn.value ? MEDIA_CONFIG.VIDEO.CONSTRAINTS : false
      }
      mediaStream.value = await navigator.mediaDevices.getUserMedia(constraints)
      // Bind the local preview element
      if (localVideo.value) {
        localVideo.value.srcObject = mediaStream.value
      }
      // Wire the capture into the PCM worklet pipeline
      await createAudioWorklet()
    } else {
      // Audio track already exists — just re-enable it
      const audioTrack = mediaStream.value.getAudioTracks()[0]
      if (audioTrack) audioTrack.enabled = true
      await createAudioWorklet() // reconnect the audio processing chain
    }

    isMicrophoneOn.value = true
    updateConnectionStatus()
    emit('sendMediaData', {
      type: 'audioStatus',
      data: { targetId: currentTarget.value, status: true }
    })
  } catch (error) {
    console.error('麦克风权限获取失败:', error)
    connectionStatus.value = `错误: ${error.message}`
    hasError.value = true
  }
}

// Toggle the camera by enabling/disabling the video track on the shared
// media stream, (re)starting MediaRecorder when turning on.
// Emits a 'videoStatus' control message to the peer either way.
const toggleCamera = async () => {
  if (!isVideoSupported.value) {
    connectionStatus.value = '您的浏览器不支持视频功能'
    return
  }

  if (isCameraOn.value) {
    // Turning OFF: disable the video track and stop the recorder
    if (mediaStream.value) {
      const videoTrack = mediaStream.value.getVideoTracks()[0]
      if (videoTrack) videoTrack.enabled = false
    }
    if (mediaRecorder) {
      mediaRecorder.stop()
      mediaRecorder = null
    }
    isCameraOn.value = false
    updateConnectionStatus()
    emit('sendMediaData', {
      type: 'videoStatus',
      data: { targetId: currentTarget.value, status: false }
    })
    return
  }

  if (!currentTarget.value) {
    connectionStatus.value = '请先建立连接'
    return
  }

  try {
    hasError.value = false
    // First activation, or the current stream has no video track
    if (!mediaStream.value || mediaStream.value.getVideoTracks().length === 0) {
      // Request a combined stream; include audio only if the mic is on.
      // NOTE(review): an existing audio-only stream is replaced wholesale
      // and its old tracks are never stop()ped — confirm this doesn't
      // leave the previous microphone capture running.
      const constraints = {
        audio: isMicrophoneOn.value ? MEDIA_CONFIG.AUDIO.CONSTRAINTS : false,
        video: MEDIA_CONFIG.VIDEO.CONSTRAINTS
      }
      mediaStream.value = await navigator.mediaDevices.getUserMedia(constraints)
      // Bind the local preview element
      if (localVideo.value) {
        localVideo.value.srcObject = mediaStream.value
      }
      // Rewire audio processing since the stream object changed
      if (isMicrophoneOn.value) {
        await createAudioWorklet()
      }
    } else {
      // Video track already exists — just re-enable it
      const videoTrack = mediaStream.value.getVideoTracks()[0]
      if (videoTrack) videoTrack.enabled = true
    }

    // Start recording (the single stream may carry both audio and video)
    startMediaRecording()
    isCameraOn.value = true
    updateConnectionStatus()
    emit('sendMediaData', {
      type: 'videoStatus',
      data: { targetId: currentTarget.value, status: true }
    })
  } catch (error) {
    console.error('摄像头权限获取失败:', error)
    connectionStatus.value = `错误: ${error.message}`
    hasError.value = true
  }
}

// Start a MediaRecorder on the shared stream and ship each timeslice as a
// binary frame (type 3 = mixed audio/video). Fix: guard the preliminary
// stop() with a state check — calling stop() on an 'inactive' recorder
// throws InvalidStateError.
const startMediaRecording = () => {
  if (!mediaStream.value || !isVideoSupported.value) return
  // Stop a previous recorder only if it is still running.
  if (mediaRecorder && mediaRecorder.state !== 'inactive') {
    mediaRecorder.stop()
  }

  const options = {
    mimeType: MEDIA_CONFIG.VIDEO.MIME_TYPE,
    videoBitsPerSecond: MEDIA_CONFIG.VIDEO.BITRATE,
    audioBitsPerSecond: isMicrophoneOn.value ? 64000 : 0
  }

  try {
    mediaRecorder = new MediaRecorder(mediaStream.value, options)

    // Each recorded slice carries both tracks; send as binary type 3.
    mediaRecorder.ondataavailable = event => {
      if (event.data.size > 0 && isConnected.value && currentTarget.value) {
        event.data.arrayBuffer().then(buffer => {
          emit('sendMediaBuffer', packData(3, currentTarget.value, buffer))
        })
      }
    }

    mediaRecorder.start(MEDIA_CONFIG.VIDEO.TIME_SLICE)
    console.log('媒体录制已开始（包含音视频）')
  } catch (error) {
    console.error('启动媒体录制失败:', error)
    hasError.value = true
    connectionStatus.value = `媒体录制失败: ${error.message}`
  }
}

// Build (or rebuild) the audio processing chain from the shared stream:
// MediaStreamSource -> PCM worklet -> destination. PCM chunks arriving on
// the worklet port are forwarded as binary type-1 frames while the mic is on.
const createAudioWorklet = async () => {
  if (!audioContext.value || !mediaStream.value) return

  try {
    // Disconnect any previous nodes before rewiring (stream may have changed)
    if (workletNode.value) workletNode.value.disconnect()
    if (audioSource.value) audioSource.value.disconnect()

    // Tap the shared stream's audio into the worklet
    audioSource.value = audioContext.value.createMediaStreamSource(mediaStream.value)
    workletNode.value = new AudioWorkletNode(audioContext.value, MEDIA_CONFIG.AUDIO.WORKLET_NAME)

    // Wire the processing chain
    audioSource.value.connect(workletNode.value)
    // NOTE(review): connecting to destination plays the local mic back to
    // the user (self-monitoring) — confirm this echo is intentional.
    workletNode.value.connect(audioContext.value.destination)

    // PCM chunks go out as binary type 1 (audio-only framing)
    workletNode.value.port.onmessage = event => {
      if (isMicrophoneOn.value) {
        emit('sendMediaBuffer', packData(1, currentTarget.value, event.data))
      }
    }
  } catch (error) {
    console.error('创建音频工作let节点失败:', error)
    connectionStatus.value = '音频处理初始化失败'
    hasError.value = true
  }
}

// Pack a media payload into a binary frame:
//   [ type:uint8 | targetIdLen:uint8 | targetId (UTF-8) | payload ]
// @param {number} type         frame type (1 = PCM audio, 3 = mixed A/V)
// @param {string} targetId     recipient id; must encode to <= 255 UTF-8 bytes
// @param {ArrayBuffer} dataBuffer raw payload bytes
// @returns {ArrayBuffer} the framed message
// @throws {RangeError} if targetId is too long for the 1-byte length field
function packData(type, targetId, dataBuffer) {
  const targetIdBytes = new TextEncoder().encode(targetId)
  const targetIdLength = targetIdBytes.length

  // The length field is a single byte; a longer id would be silently
  // truncated modulo 256 and corrupt the frame, so fail loudly instead.
  if (targetIdLength > 255) {
    throw new RangeError('targetId exceeds 255 bytes when UTF-8 encoded')
  }

  const totalLength = 1 + 1 + targetIdLength + dataBuffer.byteLength
  const buffer = new ArrayBuffer(totalLength)
  const view = new DataView(buffer)

  view.setUint8(0, type)
  view.setUint8(1, targetIdLength)
  new Uint8Array(buffer).set(targetIdBytes, 2)
  new Uint8Array(buffer).set(new Uint8Array(dataBuffer), 2 + targetIdLength)

  return buffer
}

// Dispatch an incoming binary media frame by its type:
// 1 = PCM audio, 2 = legacy video-only, 3 = mixed A/V stream.
const handleMediaData = mediaData => {
  const payload = mediaData && mediaData.data
  if (!(payload instanceof ArrayBuffer) || payload.byteLength === 0) return

  try {
    if (mediaData.type === 1) {
      // Pure audio frame
      handleAudioData(payload)
    } else if (mediaData.type === 2 || mediaData.type === 3) {
      // Video-bearing frames are only processed while connected
      if (isConnected.value) {
        handleVideoData(payload)
      }
    } else {
      console.warn('未知媒体类型:', mediaData.type)
    }
  } catch (error) {
    console.error('解析媒体数据包失败:', error)
  }
}

// Handle a JSON control message from the peer: remote camera/mic status
// changes and remote hang-up.
const handleStatusData = statusData => {
  if (!statusData.type || !statusData.data) return
  const { type, data } = statusData

  if (type === 'videoStatus') {
    remoteCameraActive.value = data.status
    if (data.status) {
      // Remote camera came on: rebuild the MediaSource playback pipeline
      initRemoteVideo()
    } else {
      cleanupVideoResources()
    }
    console.log(`远程摄像头状态更新: ${remoteCameraActive.value ? '开启' : '关闭'}`)
  } else if (type === 'audioStatus') {
    console.log(`远程麦克风状态: ${data.status ? '开启' : '关闭'}`)
  } else if (type === 'endCall') {
    // Only act if the hang-up concerns the current conversation
    if (data.targetId === currentTarget.value) {
      endMediaCall()
      connectionStatus.value = '对方已结束通话'
    }
  } else {
    console.warn('未知状态数据类型:', type)
  }
}

// Enqueue a received PCM chunk for playback; trims the queue to bound
// latency and kicks off playback once the jitter buffer is primed.
const handleAudioData = binaryData => {
  if (!(binaryData instanceof ArrayBuffer)) return

  try {
    audioBufferQueue.value.push(binaryData)

    // Bound the queue: keep only the newest chunks so latency can't build up
    const queued = audioBufferQueue.value
    if (queued.length > 8) {
      audioBufferQueue.value = queued.slice(-4)
    }

    // Start playing once enough chunks are buffered and nothing is playing
    const primed = audioBufferQueue.value.length >= MIN_QUEUE_LENGTH
    if (primed && !isPlayingAudio.value) {
      playNextAudioBuffer()
    }
  } catch (error) {
    console.error('处理音频数据失败:', error)
  }
}

// Heuristic keyframe check: scans the buffer for the ASCII marker "KFI".
// NOTE: this is a simplification — real WebM keyframe detection requires
// parsing the EBML container structure.
// Fix: the loop bound is now `<=` so a marker ending on the buffer's final
// byte is found (the original `<` bound skipped the last start position).
const checkIfKeyframe = buffer => {
  const view = new Uint8Array(buffer)
  const KFI_MARKER = [0x4b, 0x46, 0x49] // ASCII "KFI"

  for (let i = 0; i <= view.length - KFI_MARKER.length; i++) {
    if (view[i] === KFI_MARKER[0] && view[i + 1] === KFI_MARKER[1] && view[i + 2] === KFI_MARKER[2]) {
      return true
    }
  }

  return false
}

// Enqueue a received video chunk for the SourceBuffer pump, dropping old
// chunks when the queue grows too large (prevents unbounded memory growth).
const handleVideoData = binaryData => {
  const playbackReady = sourceBuffer && mediaSource
  if (!(binaryData instanceof ArrayBuffer) || !playbackReady) return

  try {
    videoChunkQueue.push(binaryData)

    // Keep only the most recent chunks when the queue backs up
    if (videoChunkQueue.length > 10) {
      videoChunkQueue = videoChunkQueue.slice(-5)
    }

    processVideoQueue()
  } catch (error) {
    console.error('处理视频数据失败:', error)
    hasError.value = true
  }
}

// Play the next queued PCM chunk through Web Audio; chains itself from the
// source's onended callback. Fix: the early return when the AudioContext is
// missing/closed previously left isPlayingAudio stuck at true, permanently
// blocking any future playback — it is now reset before returning.
const playNextAudioBuffer = async () => {
  if (audioBufferQueue.value.length === 0) {
    isPlayingAudio.value = false
    return
  }

  isPlayingAudio.value = true
  const arrayBuffer = audioBufferQueue.value.shift()

  try {
    if (!audioContext.value || audioContext.value.state === 'closed') {
      // Reset the flag so playback can restart if a context comes back.
      isPlayingAudio.value = false
      return
    }

    if (audioContext.value.state === 'suspended') {
      await audioContext.value.resume()
    }

    // Int16 PCM -> Float32 samples -> AudioBuffer
    const pcmBuffer = new Int16Array(arrayBuffer)
    const floatBuffer = int16ToFloat32(pcmBuffer)

    const audioBuffer = audioContext.value.createBuffer(
      MEDIA_CONFIG.AUDIO.CHANNELS,
      floatBuffer.length,
      MEDIA_CONFIG.AUDIO.SAMPLE_RATE
    )
    audioBuffer.getChannelData(0).set(floatBuffer)

    const source = audioContext.value.createBufferSource()
    source.buffer = audioBuffer
    source.connect(audioContext.value.destination)
    source.start(0)

    // Chain the next chunk when this one finishes
    source.onended = () => {
      if (audioBufferQueue.value.length > 0) {
        playNextAudioBuffer()
      } else {
        isPlayingAudio.value = false
      }
    }
  } catch (error) {
    console.error('播放音频失败:', error)
    playNextAudioBuffer() // skip the bad chunk and try the next one
  }
}

// Stop sending audio: disable the capture track (the stream stays alive),
// close the worklet port, update UI state, and notify the peer.
const stopAudio = () => {
  const stream = mediaStream.value
  if (stream) {
    const [audioTrack] = stream.getAudioTracks()
    if (audioTrack) audioTrack.enabled = false
  }

  const worklet = workletNode.value
  if (worklet) {
    worklet.port.close()
  }

  isMicrophoneOn.value = false
  updateConnectionStatus()

  emit('sendMediaData', {
    type: 'audioStatus',
    data: { targetId: currentTarget.value, status: false }
  })
}

// Stop sending video: halt the recorder, disable the capture track (the
// stream stays alive), tear down playback resources, and notify the peer.
const stopVideo = () => {
  if (mediaRecorder) {
    mediaRecorder.stop()
    mediaRecorder = null
  }

  const stream = mediaStream.value
  if (stream) {
    const [videoTrack] = stream.getVideoTracks()
    if (videoTrack) videoTrack.enabled = false
  }

  cleanupVideoResources()
  isCameraOn.value = false
  updateConnectionStatus()

  emit('sendMediaData', {
    type: 'videoStatus',
    data: { targetId: currentTarget.value, status: false }
  })
}

// Convert signed 16-bit PCM samples to Float32 in [-1, 1].
// Negative samples divide by 0x8000 and positive by 0x7fff so that both
// extremes map exactly to -1 and +1.
const int16ToFloat32 = buffer => {
  const result = new Float32Array(buffer.length)
  for (let i = 0; i < buffer.length; i++) {
    const sample = buffer[i]
    result[i] = sample < 0 ? sample / 0x8000 : sample / 0x7fff
  }
  return result
}

// Begin a call with the given peer: record the target, mark the connection
// live, and send the 'startCall' control message. No-op if already talking
// to the same target.
const startMediaCall = targetId => {
  if (targetId === currentTarget.value) return

  currentTarget.value = targetId
  isConnected.value = true
  hasError.value = false
  updateConnectionStatus()

  emit('sendMediaData', { type: 'startCall', data: { targetId } })
}

// End the call: stop all capture tracks, tear down processing nodes, reset
// state, and notify the peer. Fix: the peer id is captured BEFORE the state
// reset — the original nulled currentTarget first, so the endCall message
// always carried targetId: null and the remote side could never match it.
const endMediaCall = () => {
  const peerId = currentTarget.value

  // Stop every capture track (releases camera/microphone hardware)
  if (mediaStream.value) {
    mediaStream.value.getTracks().forEach(track => track.stop())
  }
  mediaStream.value = null

  // Tear down recording and audio-processing resources
  if (mediaRecorder) {
    mediaRecorder.stop()
    mediaRecorder = null
  }
  if (workletNode.value) {
    workletNode.value.disconnect()
    workletNode.value = null
  }
  if (audioSource.value) {
    audioSource.value.disconnect()
    audioSource.value = null
  }

  // Reset connection/UI state
  isMicrophoneOn.value = false
  isCameraOn.value = false
  isConnected.value = false
  currentTarget.value = null
  connectionStatus.value = '未连接'
  audioBufferQueue.value = []
  hasError.value = false

  // Tell the peer (using the id captured before the reset)
  emit('sendMediaData', {
    type: 'endCall',
    data: { targetId: peerId }
  })
}

// Recompute the human-readable connection status from the current
// mic/camera/connection flags and store it in connectionStatus.
const updateConnectionStatus = () => {
  let status
  if (!isConnected.value) {
    status = '未连接'
  } else if (isMicrophoneOn.value && isCameraOn.value) {
    status = '正在进行音视频通话...'
  } else if (isMicrophoneOn.value) {
    status = '正在进行语音通话...'
  } else if (isCameraOn.value) {
    status = '正在进行视频通话（静音）...'
  } else {
    status = '已连接，等待开始通话...'
  }
  connectionStatus.value = status
}

// Release all media resources when the component unmounts: end any active
// call, then close the AudioContext (frees the audio hardware).
onBeforeUnmount(() => {
  endMediaCall()
  if (audioContext.value) {
    audioContext.value.close()
  }
})

// Public component API for the parent: call lifecycle controls, the two
// inbound data handlers, and a status snapshot getter.
defineExpose({
  startMediaCall,
  endMediaCall,
  toggleMicrophone,
  toggleCamera,
  handleMediaData,
  handleStatusData,
  getStatus: () => ({
    isMicrophoneOn: isMicrophoneOn.value,
    isCameraOn: isCameraOn.value,
    isConnected: isConnected.value,
    connectionStatus: connectionStatus.value
  })
})

// Emitted events: 'sendMediaBuffer' carries binary media frames,
// 'sendMediaData' carries JSON control messages.
// NOTE(review): defineEmits/defineExpose are compiler macros and do not
// need to be imported from 'vue' (the import at the top triggers a warning).
const emit = defineEmits(['sendMediaBuffer', 'sendMediaData'])
</script>

<style scoped>
/* Scoped styles for the media chat component */

/* Overall card layout */
.media-chat {
  display: flex;
  flex-direction: column;
  height: 100%;
  max-width: 800px;
  margin: 0 auto;
  padding: 1rem;
  background-color: #f8f9fa;
  border-radius: 0.5rem;
  box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
}

/* Side-by-side local/remote video panes */
.video-container {
  flex: 1;
  display: flex;
  gap: 1rem;
  margin-bottom: 1rem;
  min-height: 300px;
}

.local-video-wrapper,
.remote-video-wrapper {
  position: relative;
  flex: 1;
  background-color: #000;
  border-radius: 0.5rem;
  overflow: hidden;
}

.video-element {
  width: 100%;
  height: 100%;
  object-fit: cover;
}

/* Placeholder look while the remote camera is off */
.no-signal {
  background-color: #222;
  position: relative;
}

/* Overlay text shown on the remote pane when there is no signal */
.no-signal::after {
  content: '等待对方视频信号...';
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
  color: #888;
}

.video-label {
  position: absolute;
  bottom: 0.5rem;
  left: 0.5rem;
  background-color: rgba(0, 0, 0, 0.5);
  color: white;
  padding: 0.25rem 0.5rem;
  border-radius: 0.25rem;
  font-size: 0.875rem;
}

/* Control button row */
.controls {
  display: flex;
  gap: 0.5rem;
  margin-bottom: 1rem;
}

button {
  flex: 1;
  padding: 0.75rem 1rem;
  border: none;
  border-radius: 0.375rem;
  background-color: #0d6efd;
  color: white;
  font-size: 1rem;
  cursor: pointer;
  transition: background-color 0.2s;
}

button:hover {
  background-color: #0b5ed7;
}

button:disabled {
  background-color: #6c757d;
  cursor: not-allowed;
}

.hangup-button {
  background-color: #dc3545;
}

.hangup-button:hover {
  background-color: #bb2d3b;
}

/* Status bar: activity dots + connection text */
.status-bar {
  display: flex;
  align-items: center;
  gap: 0.5rem;
  padding: 0.5rem;
  background-color: #e9ecef;
  border-radius: 0.375rem;
}

.status-indicator {
  width: 12px;
  height: 12px;
  border-radius: 50%;
  background-color: #888;
}

/* Active dots pulse green */
.status-indicator.active {
  background-color: #28a745;
  animation: pulse 2s infinite;
}

@keyframes pulse {
  0% {
    box-shadow: 0 0 0 0 rgba(40, 167, 69, 0.7);
  }
  70% {
    box-shadow: 0 0 0 6px rgba(40, 167, 69, 0);
  }
  100% {
    box-shadow: 0 0 0 0 rgba(40, 167, 69, 0);
  }
}

.connection-status {
  flex: 1;
  font-size: 0.875rem;
  color: #333;
}

.connection-status.connected {
  color: #28a745;
}

.connection-status.error {
  color: #dc3545;
}

/* Badge shown on the camera button when video is unsupported */
.unsupported-indicator {
  display: inline-block;
  margin-left: 0.5rem;
  font-size: 0.75rem;
  background-color: #ffc107;
  color: #000;
  padding: 0.125rem 0.25rem;
  border-radius: 0.25rem;
}
</style>