<template>
  <div class="media-stream-wrapper">
    <!-- Core playback element; muted for the local preview to avoid echo -->
    <video
      ref="mediaElement"
      autoplay
      playsinline
      :muted="isLocal"
      :class="{ 'no-signal': !hasStream }"
      class="media-element"
    >
      您的浏览器不支持视频播放
    </video>

    <!-- Status overlay shown while no stream is attached -->
    <div v-if="!hasStream" class="status-overlay">
      <template v-if="isLocal">准备中...</template>
      <template v-else>等待接收数据...</template>
    </div>

    <!-- Error overlay -->
    <div v-if="hasError" class="error-overlay">
      {{ errorMessage }}
    </div>
  </div>
</template>

<script setup>
import { ref, onMounted, onBeforeUnmount, watch } from 'vue'

// Component props
const props = defineProps({
  isLocal: {
    // True: this instance captures and previews the local user's media.
    // False: this instance plays back a remote stream.
    type: Boolean,
    default: false
  },
  autoStart: {
    // When true (and isLocal), initialize() opens mic and camera immediately.
    type: Boolean,
    default: false
  }
})

// Component events (stream-data only carries 'audio' and 'video' types)
const emit = defineEmits([
  'stream-data', // { type: 'audio'|'video', buffer: ArrayBuffer }
  'error',
  'status-change'
])

// Element / media-pipeline refs
const mediaElement = ref(null) // the <video> element
const mediaSource = ref(null) // MediaSource used for remote playback
const sourceBuffer = ref(null) // SourceBuffer fed with remote WebM chunks

// State
const hasStream = ref(false) // true once a stream is attached / data received
const hasError = ref(false)
const errorMessage = ref('')
const videoQueue = ref([]) // remote video chunks awaiting appendBuffer
const isProcessing = ref(false) // guard: a queue-drain pass is in flight
const mediaStream = ref(null) // local capture stream (audio + video tracks)
const audioRecorder = ref(null) // dedicated audio recorder
const videoRecorder = ref(null) // dedicated video recorder
const audioContext = ref(null)
const audioSource = ref(null) // MediaStreamSource feeding the worklet
const audioBufferQueue = ref([]) // remote PCM chunks awaiting playback
const isPlayingAudio = ref(false)
const MIN_QUEUE_LENGTH = 4 // jitter buffer: chunks required before playback starts
const workletNode = ref(null)
const isMicrophoneOn = ref(false)
const isCameraOn = ref(false)
const isVideoSupported = ref(true)
const videoTimeoutTimer = ref(null) // video stall timer (not used in visible code — TODO confirm)
const mediaSourceUrl = ref(null) // object URL created for the MediaSource

// Capture/encode configuration (audio and video are recorded separately)
const MEDIA_CONFIG = {
  AUDIO: {
    SAMPLE_RATE: 16000,
    CHANNELS: 1,
    CHUNK_SIZE: 1024, // samples per PCM chunk posted from the worklet
    WORKLET_NAME: 'pcm-processor',
    MIME_TYPE: 'audio/webm; codecs=opus', // audio-only container format
    CONSTRAINTS: {
      sampleRate: { ideal: 16000 },
      channelCount: 1,
      echoCancellation: { ideal: true }, // request echo cancellation
      noiseSuppression: { ideal: true }, // request noise suppression
      autoGainControl: { ideal: true }, // automatic gain control
      // Advanced, non-standard hints (honored by some browsers only)
      noiseSuppressionLevel: { ideal: 2 }, // 0-2; 2 = strongest suppression
      echoCancellationType: { ideal: 'system' }, // prefer system-level AEC
      suppressLocalAudioPlayback: { ideal: true } // avoid echoing locally played audio
    }
  },
  VIDEO: {
    WIDTH: 480,
    HEIGHT: 320,
    FRAME_RATE: 15,
    MIME_TYPE: 'video/webm; codecs=vp8', // video-only container format
    CONSTRAINTS: {
      width: { ideal: 480, max: 640 },
      height: { ideal: 320, max: 480 },
      frameRate: { ideal: 15, max: 20 },
      facingMode: 'user'
    },
    TIME_SLICE: 300, // ms between MediaRecorder dataavailable events
    MAX_QUEUE_SIZE: 10, // max buffered remote chunks before truncation
    BITRATE: 300000, // bits per second
    TIMEOUT: 5000 // ms without data before the feed counts as stalled
  }
}

// Inline AudioWorklet processor source, loaded via a Blob URL in
// initAudioContext(). It low-pass filters the mic input, applies an adaptive
// silence gate, converts Float32 samples to Int16 PCM and posts fixed-size
// chunks (CHUNK_SIZE samples) back through the worklet port.
// NOTE: the template literal below is runtime string content executed inside
// the AudioWorklet — its body (including its comments) must stay untouched.
const WORKLET_CODE = `
class PcmProcessor extends AudioWorkletProcessor {
  constructor() {
    super();
    this.chunkSize = ${MEDIA_CONFIG.AUDIO.CHUNK_SIZE};
    this.pcmBuffer = new Int16Array();
    // 降低静默阈值（更敏感过滤噪音）
    this.silenceThreshold = 0.003; 
    this.silenceFrames = 0;
    this.silenceFrameLimit = 6; // 减少静默容忍帧数，避免声音截断
    // 新增：低通滤波器（减少高频噪音）
    this.lastSample = 0;
    this.smoothingFactor = 0.7; // 平滑系数（0-1，值越大过滤越强）
    // 新增：动态阈值调整（适应环境噪音）
    this.noiseFloor = 0.0005;
    this.adaptationRate = 0.01;
  }

  process(inputs, outputs, parameters) {
    const input = inputs[0];
    if (input.length === 0) return true;

    const inputData = input[0];
    // 步骤1：应用低通滤波，减少高频噪音
    const filteredData = new Float32Array(inputData.length);
    for (let i = 0; i < inputData.length; i++) {
      this.lastSample = this.smoothingFactor * inputData[i] + (1 - this.smoothingFactor) * this.lastSample;
      filteredData[i] = this.lastSample;
    }

    // 步骤2：动态调整静默阈值（适应环境噪音变化）
    this.updateNoiseFloor(filteredData);
    const adjustedThreshold = Math.max(this.silenceThreshold, this.noiseFloor * 3);

    // 步骤3：判断是否为静默帧
    const isSilence = this.isSilenceFrame(filteredData, adjustedThreshold);
    if (isSilence) {
      this.silenceFrames++;
      if (this.silenceFrames > this.silenceFrameLimit) {
        return true;
      }
    } else {
      this.silenceFrames = 0;
    }

    // 步骤4：转换并缓冲数据
    const pcmData = this.float32ToInt16(filteredData);
    this.pcmBuffer = this.concatBuffers(this.pcmBuffer, pcmData);

    // 按块发送数据（确保数据连续性）
    while (this.pcmBuffer.length >= this.chunkSize) {
      const chunk = new Int16Array(this.pcmBuffer.subarray(0, this.chunkSize));
      this.port.postMessage(chunk.buffer); 
      this.pcmBuffer = this.pcmBuffer.subarray(this.chunkSize);
    }

    return true;
  }

  // 新增：动态更新环境噪音基准值
  updateNoiseFloor(buffer) {
    let sum = 0;
    for (let i = 0; i < buffer.length; i++) {
      sum += Math.abs(buffer[i]);
    }
    const avg = sum / buffer.length;
    // 只在静默时更新基准值，避免正常声音影响
    if (avg < this.silenceThreshold) {
      this.noiseFloor = (1 - this.adaptationRate) * this.noiseFloor + this.adaptationRate * avg;
    }
  }

  isSilenceFrame(buffer, threshold) {
    let sum = 0;
    for (let i = 0; i < buffer.length; i++) {
      sum += Math.abs(buffer[i]);
    }
    return sum / buffer.length < threshold;
  }

  float32ToInt16(buffer) {
    const len = buffer.length;
    const result = new Int16Array(len);
    for (let i = 0; i < len; i++) {
      const val = Math.max(-1, Math.min(1, buffer[i]));
      result[i] = val < 0 ? Math.floor(val * 0x8000) : Math.floor(val * 0x7FFF);
    }
    return result;
  }

  concatBuffers(a, b) {
    const tmp = new Int16Array(a.length + b.length);
    tmp.set(a, 0);
    tmp.set(b, a.length);
    return tmp;
  }
}

registerProcessor('${MEDIA_CONFIG.AUDIO.WORKLET_NAME}', PcmProcessor);
`

onMounted(async () => {
  // Initialization is deferred: the parent calls initialize() explicitly.
  // await initialize()
})

// Entry point called by the parent to set this component up.
// Local side: prepare the AudioContext, verify codec support and, when
// autoStart is set, open the microphone and camera straight away.
// Remote side: currently a no-op (MediaSource init is invoked externally).
const initialize = async () => {
  if (!props.isLocal) {
    // initMediaSource()
    return
  }

  await initAudioContext()
  checkVideoSupport()

  if (!props.autoStart) return

  await createCompleteMediaStream()
  if (mediaStream.value) {
    await toggleMicrophone()
    await toggleCamera()
  }
}

// Create the full audio+video capture stream once (tracks are managed
// together so mic/camera toggles just flip track.enabled). Both tracks start
// disabled; toggleMicrophone/toggleCamera enable them. Returns true on success.
const createCompleteMediaStream = async () => {
  // Idempotent: reuse the existing stream.
  if (mediaStream.value) return true

  try {
    const constraints = {
      audio: MEDIA_CONFIG.AUDIO.CONSTRAINTS,
      video: MEDIA_CONFIG.VIDEO.CONSTRAINTS
    }

    mediaStream.value = await navigator.mediaDevices.getUserMedia(constraints)
    mediaElement.value.srcObject = mediaStream.value

    // Start with every track disabled until explicitly toggled on.
    const audioTrack = mediaStream.value.getAudioTracks()[0]
    const videoTrack = mediaStream.value.getVideoTracks()[0]
    if (audioTrack) audioTrack.enabled = false
    if (videoTrack) videoTrack.enabled = false

    await createAudioWorklet()
    hasStream.value = true
    return true
  } catch (error) {
    setError(`媒体流初始化失败: ${error.message}`)
    return false
  }
}

// Set up the shared AudioContext (16 kHz, interactive latency) and load the
// inline PCM AudioWorklet module from a Blob URL. Also registers a one-shot
// click handler to resume the context, since browsers keep it suspended
// until a user gesture.
const initAudioContext = async () => {
  const Ctx = window.AudioContext || window.webkitAudioContext
  if (!Ctx) {
    setError('浏览器不支持音频功能')
    return
  }

  audioContext.value = new Ctx({
    sampleRate: MEDIA_CONFIG.AUDIO.SAMPLE_RATE,
    latencyHint: 'interactive'
  })

  const resumeOnGesture = async () => {
    if (audioContext.value?.state === 'suspended') {
      await audioContext.value.resume()
    }
  }
  document.addEventListener('click', resumeOnGesture, { once: true })

  try {
    const moduleBlob = new Blob([WORKLET_CODE], { type: 'application/javascript' })
    const moduleUrl = URL.createObjectURL(moduleBlob)
    await audioContext.value.audioWorklet.addModule(moduleUrl)
    URL.revokeObjectURL(moduleUrl)
  } catch (error) {
    setError(`音频处理初始化失败: ${error.message}`)
  }
}

// Verify that the browser can record the configured audio format and can
// both record and play back the configured video format. Clears
// isVideoSupported and surfaces an error for anything unsupported.
const checkVideoSupport = () => {
  if (!window.MediaRecorder || !window.MediaSource) {
    isVideoSupported.value = false
    setError('浏览器不支持音视频录制功能')
    return
  }

  const canRecordAudio = MediaRecorder.isTypeSupported(MEDIA_CONFIG.AUDIO.MIME_TYPE)
  if (!canRecordAudio) {
    setError('浏览器不支持音频录制格式')
  }

  const canRecordVideo = MediaRecorder.isTypeSupported(MEDIA_CONFIG.VIDEO.MIME_TYPE)
  const canPlayVideo = MediaSource.isTypeSupported(MEDIA_CONFIG.VIDEO.MIME_TYPE)
  if (!(canRecordVideo && canPlayVideo)) {
    isVideoSupported.value = false
    setError('浏览器不支持视频录制格式')
  }
}
const isFirstVideo = ref(true) // gate: drop remote video until the first header chunk arrives
// Initialize the MediaSource pipeline for remote video playback.
const initMediaSource = () => {
  if (!window.MediaSource) {
    setError('浏览器不支持视频流播放')
    return
  }

  // Verify the container/codec can actually be buffered.
  if (!MediaSource.isTypeSupported(MEDIA_CONFIG.VIDEO.MIME_TYPE)) {
    setError(`不支持的视频格式: ${MEDIA_CONFIG.VIDEO.MIME_TYPE}`)
    return
  }

  mediaSource.value = new MediaSource()

  // The SourceBuffer can only be created once the source is open.
  mediaSource.value.addEventListener('sourceopen', () => {
    try {
      console.log('sourceopen')
      sourceBuffer.value = mediaSource.value.addSourceBuffer(MEDIA_CONFIG.VIDEO.MIME_TYPE)
      // Drain the queue again after each append completes.
      sourceBuffer.value.addEventListener('updateend', processVideoQueue)
      hasStream.value = true
    } catch (error) {
      setError(`视频初始化失败: ${error.message}`)
    }
  })

  mediaSource.value.addEventListener('sourceclose', () => {
    console.log('sourceclose')
    // Release resources as soon as the source closes.
    cleanupMediaSource()
  })

  mediaSource.value.addEventListener('error', e => {
    console.error('MediaSource错误:', e)
    setError(`视频源错误: ${e.message || '未知错误'}`)
  })
  // Attach the MediaSource to the <video> element via an object URL.
  mediaSourceUrl.value = URL.createObjectURL(mediaSource.value)
  mediaElement.value.src = mediaSourceUrl.value
}

// Tear down the MediaSource pipeline: end the stream, detach it from the
// <video> element, revoke the object URL, unhook the SourceBuffer and drop
// any queued remote video chunks.
const cleanupMediaSource = () => {
  if (mediaSource.value) {
    if (mediaSource.value.readyState !== 'closed') {
      try {
        mediaSource.value.endOfStream()
      } catch (e) {
        console.warn('关闭MediaSource失败:', e)
      }
    }
    mediaSource.value = null
  }

  // Fix: the template ref can already be null when this runs during unmount
  // (cleanup() -> cleanupMediaSource()); guard before touching .src.
  if (mediaElement.value) {
    mediaElement.value.src = ''
  }

  if (mediaSourceUrl.value) {
    URL.revokeObjectURL(mediaSourceUrl.value)
    mediaSourceUrl.value = null
  }

  if (sourceBuffer.value) {
    sourceBuffer.value.removeEventListener('updateend', processVideoQueue)
    sourceBuffer.value = null
  }

  videoQueue.value = []
}
// Drain the remote video queue into the SourceBuffer, one chunk per pass.
// Re-entered from the SourceBuffer's 'updateend' event and via retry timers,
// so every entry re-validates the pipeline state first.
const processVideoQueue = () => {
  // Bail out if the MediaSource is gone or no longer open.
  if (!mediaSource.value || mediaSource.value.readyState !== 'open') {
    console.warn('MediaSource未打开，停止处理队列')
    isProcessing.value = false
    return
  }

  if (!sourceBuffer.value || sourceBuffer.value.updating) {
    // SourceBuffer busy (or not created yet): retry shortly.
    setTimeout(processVideoQueue, 50)
    return
  }

  if (videoQueue.value.length === 0) {
    isProcessing.value = false
    return
  }

  isProcessing.value = true
  const buffer = videoQueue.value.shift()

  try {
    sourceBuffer.value.appendBuffer(buffer)
  } catch (err) {
    console.error('appendBuffer错误:', err)
    // Only surface the error while the MediaSource is still usable.
    if (mediaSource.value && mediaSource.value.readyState === 'open') {
      setError(`视频播放错误: ${err.message}`)
    }
    isProcessing.value = false
    // Keep draining despite the failed chunk.
    setTimeout(processVideoQueue, 50)
  }
}

// Toggle the local microphone. Turning it off stops the (optional) audio
// recorder and tears down the worklet node; turning it on enables the audio
// track and (re)creates the worklet pipeline. Returns true on success.
const toggleMicrophone = async () => {
  if (!props.isLocal) return false

  if (!mediaStream.value && !(await createCompleteMediaStream())) {
    return false
  }

  const [audioTrack] = mediaStream.value.getAudioTracks()
  if (!audioTrack) {
    setError('未找到音频轨道')
    return false
  }

  const turningOff = isMicrophoneOn.value
  if (turningOff) {
    // Mute capture and stop forwarding PCM chunks.
    audioTrack.enabled = false
    if (audioRecorder.value) {
      audioRecorder.value.stop()
      audioRecorder.value = null
    }
    if (workletNode.value) {
      workletNode.value.disconnect()
      workletNode.value = null
    }
  } else {
    // Unmute capture and rebuild the PCM processing graph.
    audioTrack.enabled = true
    await createAudioWorklet()
  }

  isMicrophoneOn.value = !turningOff
  emitStatusChange()
  return true
}

// (Re)build the audio processing graph:
//   mediaStream -> MediaStreamSource -> AudioWorkletNode('pcm-processor') -> destination
// The worklet posts filtered Int16 PCM chunks, which are forwarded to the
// parent as 'stream-data' while the microphone is on.
const createAudioWorklet = async () => {
  if (!audioContext.value || !mediaStream.value) return

  // Drop any previous graph nodes before rewiring.
  if (workletNode.value) {
    workletNode.value.disconnect()
  }
  if (audioSource.value) {
    audioSource.value.disconnect()
  }

  audioSource.value = audioContext.value.createMediaStreamSource(mediaStream.value)
  workletNode.value = new AudioWorkletNode(audioContext.value, MEDIA_CONFIG.AUDIO.WORKLET_NAME)

  audioSource.value.connect(workletNode.value)
  // NOTE(review): connecting the worklet to the destination keeps the graph
  // pulled, but it also routes the mic signal to local output — confirm this
  // is intended (possible local echo source).
  workletNode.value.connect(audioContext.value.destination)

  // The worklet does the real-time processing; forward its chunks upstream.
  workletNode.value.port.onmessage = event => {
    if (isMicrophoneOn.value) {
      emit('stream-data', {
        type: 'audio',
        buffer: event.data
      })
    }
  }
}
const isFirst = ref(true) // next emitted video chunk is the first of a fresh WebM segment

// Toggle the local camera. Turning it off stops the pure-video recorder and
// disables the track; turning it on enables the track and starts a new
// recording. Returns true on success.
const toggleCamera = async () => {
  if (!props.isLocal || !isVideoSupported.value) return false

  if (!mediaStream.value && !(await createCompleteMediaStream())) {
    return false
  }

  const [videoTrack] = mediaStream.value.getVideoTracks()
  if (!videoTrack) {
    setError('未找到视频轨道')
    return false
  }

  const turningOff = isCameraOn.value
  if (turningOff) {
    // Stop recording and mark the next segment as fresh.
    isFirst.value = true
    videoTrack.enabled = false
    if (videoRecorder.value) {
      videoRecorder.value.stop()
      videoRecorder.value = null
    }
  } else {
    // Enable capture and start the video-only recording.
    videoTrack.enabled = true
    startVideoRecording()
  }

  isCameraOn.value = !turningOff
  emitStatusChange()
  return true
}

// Record the video track in isolation (video-only WebM) and forward each
// encoded chunk to the parent via 'stream-data'. The first chunk is flagged
// so the receiver knows a fresh WebM header is coming.
const startVideoRecording = () => {
  if (!mediaStream.value) return
  console.log('startVideoRecording', isCameraOn.value)

  // Fix: guard against a missing video track — the original passed
  // getVideoTracks()[0] straight into new MediaStream([...]), which throws a
  // TypeError when the array entry is undefined.
  const videoTrack = mediaStream.value.getVideoTracks()[0]
  if (!videoTrack) {
    setError('未找到视频轨道')
    return
  }

  // Build a stream containing only the video track.
  const videoOnlyStream = new MediaStream([videoTrack])

  videoRecorder.value = new MediaRecorder(videoOnlyStream, {
    mimeType: MEDIA_CONFIG.VIDEO.MIME_TYPE,
    videoBitsPerSecond: MEDIA_CONFIG.VIDEO.BITRATE
  })

  videoRecorder.value.ondataavailable = event => {
    if (event.data.size > 0 && isCameraOn.value) {
      event.data.arrayBuffer().then(buffer => {
        emit('stream-data', {
          first: isFirst.value,
          type: 'video',
          buffer: buffer
        })
        isFirst.value = false
      })
    }
  }

  videoRecorder.value.onerror = e => {
    console.error(' 视频录制错误:', e)
    setError(`录制错误: ${e.message || '未知错误'}`)
  }

  videoRecorder.value.start(MEDIA_CONFIG.VIDEO.TIME_SLICE)
}

// Restart the video recording pipeline from a clean state (e.g. after the
// receiver lost sync): stop any in-flight recorder, disable the track, then
// re-enable it and start a fresh recording whose first chunk carries a new
// WebM header (isFirst = true). Returns true on success, false when no
// video track is available.
const restart = async () => {
  console.log('重启视频录制')

  if (isCameraOn.value) {
    // Stop the current recording. MediaRecorder.stop() returns undefined,
    // so the original's `await` on it was a no-op and has been removed.
    if (videoRecorder.value) {
      videoRecorder.value.stop()
      videoRecorder.value = null
    }

    // Disable the capture track while we restart.
    const activeTrack = mediaStream.value?.getVideoTracks()[0]
    if (activeTrack) {
      activeTrack.enabled = false
    }
    // Give the recorder a moment to flush and release resources.
    await new Promise(resolve => setTimeout(resolve, 1000))
  }

  // The next emitted chunk must be flagged as the start of a new segment.
  isFirst.value = true

  // Fix: the original dereferenced the track without a null check and threw
  // a TypeError when the stream (or its video track) was missing.
  const videoTrack = mediaStream.value?.getVideoTracks()[0]
  if (!videoTrack) {
    setError('未找到视频轨道')
    return false
  }

  videoTrack.enabled = true
  startVideoRecording()
  isCameraOn.value = true
  emitStatusChange()

  return true
}

// Enumerate available camera devices (with a Huawei-WebView workaround:
// a permission-granting getUserMedia call is needed before labels/IDs are
// fully populated). Returns plain-object camera descriptors, or [] on failure.
const getCameraDevices = async () => {
  try {
    // Huawei devices may need an initial permission grant before
    // enumerateDevices() returns complete info.
    const probeStream = await navigator.mediaDevices.getUserMedia({ video: true })

    const devices = await navigator.mediaDevices.enumerateDevices()
    const cameras = devices.filter(device => device.kind === 'videoinput')

    // Fix: release the probe stream — the original leaked it, keeping the
    // camera (and its indicator light) on for the rest of the session.
    probeStream.getTracks().forEach(track => track.stop())

    // Fix: MediaDeviceInfo properties are prototype accessors, so a plain
    // spread ({ ...cam }) produces an object without deviceId/label. Copy
    // the fields explicitly so callers (e.g. flipCamera) can read them.
    return cameras.map(cam => ({
      deviceId: cam.deviceId,
      groupId: cam.groupId,
      kind: cam.kind,
      label: cam.label,
      // Heuristic tag for likely back-facing cameras (labels are unreliable
      // on some Huawei devices).
      isPossibleBack: cam.label.toLowerCase().includes('back') || cam.label.toLowerCase().includes('environment')
    }))
  } catch (error) {
    setError(`获取摄像头列表失败: ${error.message}`)
    return []
  }
}
const targetFacing = ref('user') // current camera facing; flipped on each flipCamera() call
// Flip between front and back camera (Huawei-WebView compatible): first try
// facingMode-based switching, then fall back to deviceId-based selection.
// Restores recording and audio-worklet state on the new stream.
const flipCamera = async () => {
  if (!props.isLocal || !mediaStream.value) return false

  // Stop every current track before requesting a new stream.
  mediaStream.value.getTracks().forEach(track => track.stop())

  targetFacing.value = targetFacing.value === 'user' ? 'environment' : 'user'

  try {
    // Preferred path: switch by facing direction (Huawei-friendly).
    const newStream = await navigator.mediaDevices.getUserMedia({
      audio: MEDIA_CONFIG.AUDIO.CONSTRAINTS,
      video: {
        ...MEDIA_CONFIG.VIDEO.CONSTRAINTS,
        facingMode: { ideal: targetFacing.value } // 'ideal' (not 'exact') avoids Huawei rejections
      }
    })

    // Swap the new stream into the preview element.
    mediaStream.value = newStream
    mediaElement.value.srcObject = newStream

    // Restore recording state on the new tracks.
    const newVideoTrack = newStream.getVideoTracks()[0]
    const newAudioTrack = newStream.getAudioTracks()[0]

    if (newVideoTrack) {
      // Restart recording on the new track (enabled only if camera was on).
      newVideoTrack.enabled = isCameraOn.value
      if (videoRecorder.value) {
        videoRecorder.value.stop()
        videoRecorder.value = null
      }
      isFirst.value = true
      startVideoRecording()
    }

    // Restore microphone state.
    if (newAudioTrack) {
      newAudioTrack.enabled = isMicrophoneOn.value
      // Rebuild the audio worklet graph so processing continues.
      if (isMicrophoneOn.value) {
        await createAudioWorklet()
      }
    }

    emitStatusChange()
    return true
  } catch (error) {
    console.warn('基于方向切换失败，尝试设备ID切换方案:', error)

    // Fallback: switch by device ID (for other devices).
    try {
      const cameras = await getCameraDevices()
      if (cameras.length < 2) {
        setError('未检测到多个摄像头设备')
        await createCompleteMediaStream()
        return false
      }
      // Identify the camera currently in use.
      const currentVideoTrack = mediaStream.value.getVideoTracks()[0]
      const currentDeviceId = currentVideoTrack?.getSettings().deviceId

      const nextCamera = cameras.find(cam => cam.deviceId !== currentDeviceId) || cameras[0]

      const newStream = await navigator.mediaDevices.getUserMedia({
        audio: MEDIA_CONFIG.AUDIO.CONSTRAINTS,
        video: {
          ...MEDIA_CONFIG.VIDEO.CONSTRAINTS,
          deviceId: { ideal: nextCamera.deviceId } // 'ideal' for broader compatibility
        }
      })

      // Same restoration steps as the primary path.
      mediaStream.value = newStream
      mediaElement.value.srcObject = newStream

      const newVideoTrack = newStream.getVideoTracks()[0]
      const newAudioTrack = newStream.getAudioTracks()[0]

      if (newVideoTrack) {
        // Restart recording on the new track (enabled only if camera was on).
        newVideoTrack.enabled = isCameraOn.value
        if (videoRecorder.value) {
          videoRecorder.value.stop()
          videoRecorder.value = null
        }
        isFirst.value = true
        startVideoRecording()
      }

      // Restore microphone state.
      if (newAudioTrack) {
        newAudioTrack.enabled = isMicrophoneOn.value
        // Rebuild the audio worklet graph so processing continues.
        if (isMicrophoneOn.value) {
          await createAudioWorklet()
        }
      }

      emitStatusChange()
      return true
    } catch (secondError) {
      setError(`切换摄像头失败: ${secondError.message}`)
      // NOTE(review): createCompleteMediaStream() returns early while
      // mediaStream.value is still set (even though its tracks were stopped
      // above), so this may not actually reacquire a stream — verify.
      await createCompleteMediaStream()
      return false
    }
  }
}

// Ingest a chunk of remote data. 'audio' chunks are raw PCM for the audio
// queue; any type containing 'video' is a WebM chunk for the MediaSource.
// Video chunks are dropped until the first header chunk (type containing
// 'First') arrives, so playback always starts on a valid WebM header.
const receiveBuffer = (type, buffer) => {
  if (props.isLocal) {
    setError(' 本地流不能接收数据 ')
    return
  }

  try {
    hasStream.value = true

    if (type === 'audio') {
      handleAudioData(buffer)
      return
    }

    if (!type.includes('video')) return

    if (isFirstVideo.value) {
      // Wait for the segment-start chunk before accepting video data.
      if (!type.includes('First')) return
      isFirstVideo.value = false
    }

    videoQueue.value.push(buffer)

    // Keep only the newest half when the queue grows too long, to bound memory.
    const maxSize = MEDIA_CONFIG.VIDEO.MAX_QUEUE_SIZE
    if (videoQueue.value.length > maxSize) {
      videoQueue.value = videoQueue.value.slice(-Math.floor(maxSize / 2))
      console.warn('视频队列过长，已截断')
    }

    processVideoQueue()
  } catch (error) {
    setError(`数据处理错误: ${error.message}`)
  }
}

// Queue an incoming PCM chunk and start playback once enough chunks are
// buffered (MIN_QUEUE_LENGTH) to absorb network jitter.
const handleAudioData = async buffer => {
  if (!audioContext.value) {
    await initAudioContext()
    if (!audioContext.value) return
  }

  try {
    audioBufferQueue.value.push(buffer)

    // Bound the backlog: past 8 chunks, keep only the newest 4.
    if (audioBufferQueue.value.length > 8) {
      audioBufferQueue.value = audioBufferQueue.value.slice(-4)
    }

    const readyToStart =
      !isPlayingAudio.value && audioBufferQueue.value.length >= MIN_QUEUE_LENGTH
    if (readyToStart) {
      playNextAudioBuffer()
    }
  } catch (error) {
    console.error(' 处理音频数据失败:', error)
  }
}

// Play the next queued PCM chunk through the Web Audio API. Chains itself
// from each source's 'onended' callback until the queue is empty.
const playNextAudioBuffer = async () => {
  if (audioBufferQueue.value.length === 0) {
    isPlayingAudio.value = false
    return
  }

  isPlayingAudio.value = true
  const arrayBuffer = audioBufferQueue.value.shift()

  try {
    if (!audioContext.value || audioContext.value.state === 'closed') return

    // Contexts start suspended until a user gesture; try resuming here too.
    if (audioContext.value.state === 'suspended') {
      await audioContext.value.resume()
    }

    // Raw Int16 PCM -> Float32 -> AudioBuffer at the configured sample rate.
    const pcmBuffer = new Int16Array(arrayBuffer)
    const floatBuffer = int16ToFloat32(pcmBuffer)

    const audioBuffer = audioContext.value.createBuffer(
      MEDIA_CONFIG.AUDIO.CHANNELS,
      floatBuffer.length,
      MEDIA_CONFIG.AUDIO.SAMPLE_RATE
    )
    audioBuffer.getChannelData(0).set(floatBuffer)

    const source = audioContext.value.createBufferSource()
    source.buffer = audioBuffer
    source.connect(audioContext.value.destination)
    source.start(0)

    // Continue with the next chunk as soon as this one finishes.
    source.onended = () => {
      if (audioBufferQueue.value.length > 0) {
        playNextAudioBuffer()
      } else {
        isPlayingAudio.value = false
      }
    }
  } catch (error) {
    console.error(' 播放音频失败:', error)
    // Skip the bad chunk and keep the stream going.
    playNextAudioBuffer()
  }
}

// Convert signed 16-bit PCM samples to Float32 in [-1, 1]. Negative samples
// divide by 0x8000 and non-negative ones by 0x7FFF, matching the asymmetric
// Int16 range [-32768, 32767].
const int16ToFloat32 = buffer => {
  const out = new Float32Array(buffer.length)
  for (const [i, sample] of buffer.entries()) {
    out[i] = sample < 0 ? sample / 0x8000 : sample / 0x7fff
  }
  return out
}

// Publish the current mic/camera/stream state to the parent.
const emitStatusChange = () => {
  const payload = {
    isMicrophoneOn: isMicrophoneOn.value,
    isCameraOn: isCameraOn.value,
    hasStream: hasStream.value
  }
  emit('status-change', payload)
}

// Record an error message, show the error overlay, and notify the parent.
const setError = msg => {
  errorMessage.value = msg
  hasError.value = true
  emit('error', new Error(msg))
}

// Release every media resource: capture tracks, recorders, audio graph
// nodes and the MediaSource pipeline, then reset state and notify the parent.
const cleanup = () => {
  // Stop capture tracks first; this also winds down any active recorders.
  if (mediaStream.value) {
    mediaStream.value.getTracks().forEach(track => track.stop())
    mediaStream.value = null
  }

  // Fix: after the tracks above are stopped the recorders transition to
  // 'inactive', and MediaRecorder.stop() on an inactive recorder throws
  // InvalidStateError in some engines — check state before stopping.
  if (audioRecorder.value) {
    if (audioRecorder.value.state !== 'inactive') {
      audioRecorder.value.stop()
    }
    audioRecorder.value = null
  }
  if (videoRecorder.value) {
    if (videoRecorder.value.state !== 'inactive') {
      videoRecorder.value.stop()
    }
    videoRecorder.value = null
  }

  // Tear down the audio processing graph.
  if (workletNode.value) {
    workletNode.value.disconnect()
    workletNode.value = null
  }
  if (audioSource.value) {
    audioSource.value.disconnect()
    audioSource.value = null
  }

  // Tear down the remote-playback MediaSource pipeline.
  cleanupMediaSource()

  // Reset state flags and queues, then notify the parent.
  hasStream.value = false
  isMicrophoneOn.value = false
  isCameraOn.value = false
  videoQueue.value = []
  audioBufferQueue.value = []
  emitStatusChange()
}

// Public API exposed to the parent component.
defineExpose({
  initialize, // set up local capture (or, remote side, currently a no-op)
  toggleMicrophone,
  toggleCamera,
  restart, // restart video recording with a fresh WebM header
  flipCamera, // switch front/back camera
  receiveBuffer, // feed remote audio/video chunks
  stop: cleanup,
  getStatus: () => ({
    isMicrophoneOn: isMicrophoneOn.value,
    isCameraOn: isCameraOn.value,
    hasStream: hasStream.value
  }),
  stopVideoStream: () => {
    cleanupMediaSource()
    hasStream.value = false
  },
  initMediaSource: () => initMediaSource()
})

// Release all media resources when the component is torn down.
onBeforeUnmount(() => {
  cleanup()
  if (audioContext.value) {
    audioContext.value.close()
  }
})

// Track live-ness of the capture stream: when every track has ended, drop
// hasStream and notify the parent.
watch(mediaStream, newStream => {
  if (newStream) {
    hasStream.value = true
    newStream.getTracks().forEach(track => {
      track.addEventListener('ended', () => {
        // Stream counts as live while at least one track still is.
        hasStream.value = newStream.getTracks().some(t => t.readyState === 'live')
        emitStatusChange()
      })
    })
  } else {
    hasStream.value = false
  }
  emitStatusChange()
})
</script>

<style scoped>
.media-stream-wrapper {
  position: relative;
  width: 100%;
  height: 100%;
  background-color: #000;
  border-radius: 4px;
  overflow: hidden;
  box-sizing: border-box;
}

.media-element {
  width: 100%;
  height: 100%;
  object-fit: cover;
}

.media-element.no-signal {
  background-color: #222;
}

.status-overlay {
  position: absolute;
  top: 50%;
  left: 50%;
  transform: translate(-50%, -50%);
  color: #fff;
  background-color: rgba(0, 0, 0, 0.6);
  padding: 8px 16px;
  border-radius: 4px;
  font-size: 14px;
}

.error-overlay {
  position: absolute;
  bottom: 0;
  left: 0;
  right: 0;
  padding: 8px;
  background-color: rgba(220, 53, 69, 0.8);
  color: white;
  font-size: 12px;
  box-sizing: border-box;
}

/* Mobile adaptation */
@media (max-width: 768px) {
  .status-overlay {
    font-size: 12px;
    padding: 6px 12px; /* tighter padding */
  }

  .error-overlay {
    font-size: 11px;
    padding: 6px; /* smaller error area */
  }
}

/* Small-screen phones */
@media (max-width: 375px) {
  .status-overlay {
    font-size: 11px;
    padding: 4px 8px;
  }
}
</style>