<template>
  <div v-if="isReady" class="webrtc-container">
    <div class="status-bar">
      <div v-if="errorMessage" class="error-message">
        {{ errorMessage }}
      </div>
      <div class="connection-status">
        连接状态: {{ connectionStatus }}
      </div>
      <div v-if="streamStats" class="stream-stats">
        <div>视频码率: {{ streamStats.video?.bitrate || 0 }} kbps</div>
        <div>帧率: {{ streamStats.video?.frameRate || 0 }} fps</div>
        <div>音频码率: {{ streamStats.audio?.bitrate || 0 }} kbps</div>
      </div>
    </div>
    <div class="video-container">
      <video ref="localVideo" autoplay muted></video>
      <div v-for="(participant, index) in participants" :key="participant.id">
        <video
          :ref="`remoteVideo${index}`"
          autoplay
          :data-id="participant.id"
          :muted="participant.id === localParticipantId"
        ></video>
        <div class="participant-controls">
          <button @click="toggleParticipantAudio(participant.id)">
            {{ participant.audioEnabled ? '静音' : '取消静音' }}
          </button>
          <button
            v-if="callType === 'video'"
            @click="toggleParticipantVideo(participant.id)"
          >
            {{ participant.videoEnabled ? '关闭视频' : '开启视频' }}
          </button>
        </div>
      </div>
    </div>

    <div class="controls">
      <select v-model="callType">
        <option value="video">视频通话</option>
        <option value="audio">音频通话</option>
      </select>
      <select v-model="callMode">
        <option value="one-to-one">一对一</option>
        <option value="one-to-many">一对多</option>
      </select>
      <button @click="startCall">开始通话</button>
      <button @click="endCall">结束通话</button>
      <button @click="toggleAudio">切换音频</button>
      <button v-if="callType === 'video'" @click="toggleVideo">切换视频</button>
      <button @click="startScreenShare">共享屏幕</button>
      <button @click="pushVideoToSRS">推视频流</button>
      <button @click="pushAudioToSRS">推音频流</button>
      <button @click="pushAVToSRS">推音视频流</button>
    </div>

    <div class="srs-controls">
      <div class="control-group">
        <input v-model="streamName" placeholder="输入流名称" />
        <button @click="playVideoStream">播放视频流</button>
        <button @click="playAudioStream">播放音频流</button>
        <button @click="stopPushStream" class="stop">停止推流</button>
      </div>

      <div v-if="playingStreams.size > 0" class="stream-urls">
        <div v-for="[name, info] in playingStreams" :key="name" class="stream-url">
          流名称: {{ name }}<br>
          推流地址: {{ info.whipUrl }}<br>
          拉流地址: {{ info.whepUrl }}
        </div>
      </div>

      <div class="quality-charts">
        <div class="chart-container">
          <h3>视频码率历史</h3>
          <canvas ref="videoBitrateChart"></canvas>
        </div>
        <div class="chart-container">
          <h3>视频帧率历史</h3>
          <canvas ref="videoFrameRateChart"></canvas>
        </div>
        <div class="chart-container">
          <h3>音频码率历史</h3>
          <canvas ref="audioBitrateChart"></canvas>
        </div>
      </div>
    </div>
  </div>
  <div v-else class="loading">初始化中，请稍候...</div>
</template>

<script setup>
import { ref, onMounted, onBeforeUnmount, nextTick } from 'vue'
import { initWebSocket, sendSignalingMessage, onSignalingMessage } from '@/api/webrtc/signaling'

// --- Component state ---
const isReady = ref(false)          // signaling socket is connected; gates the whole template
const errorMessage = ref('')        // last error surfaced in the status bar
const connectionStatus = ref('未连接') // human-readable status text shown in the template
const callType = ref('video')       // call type: 'video' | 'audio'
const callMode = ref('one-to-one')  // call mode: 'one-to-one' | 'one-to-many'
const localParticipantId = ref(crypto.randomUUID()) // used to mute our own tile in the grid
const streamName = ref('')          // stream name typed into the SRS controls
const participants = ref([])        // remote participants rendered by the v-for in the template
const playingStreams = ref(new Map()) // streams pushed via WHIP (name -> info); drives the URL list
// NOTE(review): the template also reads `streamStats` and chart refs
// (videoBitrateChart, videoFrameRateChart, audioBitrateChart) that are never
// defined in this script — confirm whether that UI was meant to be wired up.

// Local <video> element ref
const localVideo = ref(null)
// Local capture stream (camera/mic, or screen share after startScreenShare)
const localStream = ref(null)
// All active RTCPeerConnections, keyed by a random UUID
const peerConnections = ref(new Map())

// WebRTC ICE configuration (STUN + TURN).
// NOTE(review): TURN credentials are hard-coded — move to server-issued,
// short-lived credentials before production.
const configuration = {
  iceServers: [
    { urls: 'stun:106.54.211.74:3478' },
    {
      urls: [
        'turn:106.54.211.74:3478?transport=udp',
        'turn:106.54.211.74:3478?transport=tcp',
        'turns:106.54.211.74:5349?transport=tcp'
      ],
      username: 'turn_user',
      credential: 'turn_password'
    }
  ],
  iceTransportPolicy: 'all',
  iceCandidatePoolSize: 10,
  bundlePolicy: 'balanced'
}

// Signaling WebSocket instance (set by initWebSocketConnection)
const ws = ref(null)

// Open the signaling WebSocket; flips isReady on success, surfaces an
// error message in the status bar otherwise.
const initWebSocketConnection = async () => {
  try {
    const socket = await initWebSocket()
    ws.value = socket
    isReady.value = true
  } catch (err) {
    console.error('WebSocket连接失败:', err)
    errorMessage.value = 'WebSocket连接失败，请检查网络'
  }
}

// Request camera/microphone access according to the current call type.
// Returns the MediaStream, or null (with errorMessage set) on failure.
async function getMediaStream() {
  const wantVideo = callType.value === 'video'
  const constraints = {
    audio: true,
    video: wantVideo
      ? { width: { ideal: 1280 }, height: { ideal: 720 }, frameRate: { ideal: 30 } }
      : false
  }

  try {
    return await navigator.mediaDevices.getUserMedia(constraints)
  } catch (error) {
    console.error('获取媒体流失败:', error)
    errorMessage.value = '媒体设备访问失败，请检查权限'
    return null
  }
}

// Request a screen-capture stream (with audio when the browser allows it).
// Returns the MediaStream, or null (with errorMessage set) on failure.
async function getScreenStream() {
  const displayConstraints = {
    video: { width: { ideal: 1280 }, height: { ideal: 720 }, frameRate: { ideal: 30 } },
    audio: true
  }

  try {
    return await navigator.mediaDevices.getDisplayMedia(displayConstraints)
  } catch (error) {
    console.error('获取屏幕流失败:', error)
    errorMessage.value = '屏幕共享失败，请检查权限'
    return null
  }
}

// Switch the outgoing video to a screen-capture stream: stop the camera
// track, carry the existing microphone track over, and swap the video
// track on every active peer connection.
async function startScreenShare() {
  const screenStream = await getScreenStream()
  if (!screenStream) return

  // Stop the camera track; the screen track replaces it.
  if (localStream.value) {
    localStream.value.getVideoTracks().forEach(track => track.stop())
  }

  // Keep the current microphone audio in the new stream.
  const audioTrack = localStream.value?.getAudioTracks()[0]
  if (audioTrack) {
    screenStream.addTrack(audioTrack)
  }

  localStream.value = screenStream
  // Fix: guard the template ref, consistent with startCall — the original
  // dereferenced localVideo.value unconditionally.
  if (localVideo.value) {
    localVideo.value.srcObject = localStream.value
  }

  // Replace the outgoing video track on all peer connections.
  const screenTrack = screenStream.getVideoTracks()[0]
  peerConnections.value.forEach(pc => {
    const sender = pc.getSenders().find(s => s.track?.kind === 'video')
    if (sender) {
      sender.replaceTrack(screenTrack)
    }
  })
}

// Start a call: tear down any previous call, capture local media, show the
// local preview and open a peer connection.
async function startCall() {
  endCall()

  const stream = await getMediaStream()
  if (!stream) return

  localStream.value = stream
  if (localVideo.value) {
    localVideo.value.srcObject = localStream.value
  }

  // Fix: the original called createMultiplePeerConnections() for
  // 'one-to-many' mode, but no such function exists anywhere in this file,
  // so that branch threw a ReferenceError. Fall back to a single connection
  // until real fan-out is implemented — TODO.
  await createPeerConnection()
}

// Set up one outgoing peer connection: attach local tracks, wire remote-track
// and ICE handlers, then create an SDP offer and send it over the socket.
async function createPeerConnection() {
  const pc = new RTCPeerConnection(configuration)

  // Attach local tracks (video is skipped for audio-only calls).
  for (const track of localStream.value.getTracks()) {
    if (callType.value === 'audio' && track.kind === 'video') continue
    pc.addTrack(track, localStream.value)
  }

  // Remote media arrives here; register it as a participant.
  pc.ontrack = event => {
    addParticipant(event.streams[0])
  }

  // Trickle local ICE candidates to the peer via the signaling helper.
  pc.onicecandidate = event => {
    if (event.candidate) {
      sendSignalingMessage({ type: 'candidate', candidate: event.candidate })
    }
  }

  // Create and apply the local offer.
  const offer = await pc.createOffer({
    offerToReceiveAudio: true,
    offerToReceiveVideo: callType.value === 'video'
  })
  await pc.setLocalDescription(offer)

  // Ship the offer SDP over the raw WebSocket.
  if (ws.value) {
    ws.value.send(JSON.stringify({ type: 'offer', sdp: offer.sdp }))
  }

  peerConnections.value.set(crypto.randomUUID(), pc)
}

// Register a remote stream as a participant and bind it to the <video>
// element rendered for it once the DOM has updated.
function addParticipant(stream) {
  const id = crypto.randomUUID()
  participants.value.push({
    id,
    stream,
    audioEnabled: true,
    videoEnabled: callType.value === 'video'
  })

  nextTick(() => {
    const el = document.querySelector(`video[data-id="${id}"]`)
    if (el) {
      el.srcObject = stream
    }
  })
}

// Tear down every peer connection, drop all participants and release the
// local media devices.
function endCall() {
  for (const pc of peerConnections.value.values()) {
    pc.close()
  }
  peerConnections.value.clear()
  participants.value = []

  const stream = localStream.value
  if (stream) {
    stream.getTracks().forEach(track => track.stop())
    localStream.value = null
  }
}

// Mute/unmute the local microphone by flipping every audio track's flag.
function toggleAudio() {
  const tracks = localStream.value ? localStream.value.getAudioTracks() : []
  for (const track of tracks) {
    track.enabled = !track.enabled
  }
}

// Show/hide the local camera by flipping every video track's flag.
function toggleVideo() {
  const tracks = localStream.value ? localStream.value.getVideoTracks() : []
  for (const track of tracks) {
    track.enabled = !track.enabled
  }
}

// Mute/unmute a single remote participant's audio.
function toggleParticipantAudio(participantId) {
  const participant = participants.value.find(p => p.id === participantId)
  if (!participant) return

  const enabled = !participant.audioEnabled
  participant.audioEnabled = enabled
  for (const track of participant.stream.getAudioTracks()) {
    track.enabled = enabled
  }
}

// Enable/disable a single remote participant's video.
function toggleParticipantVideo(participantId) {
  const participant = participants.value.find(p => p.id === participantId)
  if (!participant) return

  const enabled = !participant.videoEnabled
  participant.videoEnabled = enabled
  for (const track of participant.stream.getVideoTracks()) {
    track.enabled = enabled
  }
}

// Route incoming signaling messages to the matching handler.
onSignalingMessage(async (message) => {
  if (!message || !message.type) return

  try {
    if (message.type === 'offer' && message.sdp) {
      await handleOffer(message.sdp)
    } else if (message.type === 'answer' && message.sdp) {
      await handleAnswer(message.sdp)
    } else if (message.type === 'candidate' && message.candidate) {
      await handleCandidate(message.candidate)
    }
  } catch (error) {
    errorMessage.value = '信令处理失败'
    console.error('信令消息处理失败:', error)
  }
})

// Answer an incoming SDP offer: build a connection, attach local tracks,
// apply both descriptions and send the answer back over the raw WebSocket.
// NOTE(review): no ontrack/onicecandidate handlers are registered here, so
// this side never trickles ICE candidates — presumably the signaling server
// relies on candidates embedded in the SDP; verify.
async function handleOffer(sdp) {
  const pc = new RTCPeerConnection(configuration)

  const stream = localStream.value
  if (stream) {
    for (const track of stream.getTracks()) {
      if (callType.value === 'audio' && track.kind === 'video') continue
      pc.addTrack(track, stream)
    }
  }

  await pc.setRemoteDescription(new RTCSessionDescription({ type: 'offer', sdp }))

  const answer = await pc.createAnswer()
  await pc.setLocalDescription(answer)

  if (ws.value) {
    ws.value.send(JSON.stringify({ type: 'answer', sdp: answer.sdp }))
  }

  peerConnections.value.set(crypto.randomUUID(), pc)
}

// Apply a remote SDP answer to the connection that is waiting for one.
//
// Bug fix: the original looked for a connection whose *remote* description
// was an offer — but the offering side has a *local* offer and a null remote
// description at this point, so the answer was never applied. Match on
// signaling state instead.
async function handleAnswer(sdp) {
  const peerConnection = Array.from(peerConnections.value.values())
    .find(pc => pc.signalingState === 'have-local-offer')

  if (peerConnection) {
    await peerConnection.setRemoteDescription(new RTCSessionDescription({
      type: 'answer',
      sdp
    }))
  }
}

// Add a trickled ICE candidate to the first connection that already has a
// remote description applied.
// NOTE(review): candidates arriving before setRemoteDescription are silently
// dropped — consider queueing them if connectivity proves flaky.
async function handleCandidate(candidate) {
  const target = Array.from(peerConnections.value.values())
    .find(pc => pc.remoteDescription)
  if (!target) return

  await target.addIceCandidate(new RTCIceCandidate(candidate))
}

// SRS (Simple Realtime Server) WebRTC configuration.
// NOTE(review): endpoints are hard-coded to localhost over plain http —
// move to environment configuration before deploying.
const srsConfig = {
  whipUrl: 'http://localhost:1985/rtc/v1/whip/', // WHIP publish endpoint
  whepUrl: 'http://localhost:1985/rtc/v1/whep/', // WHEP playback endpoint
  streamName: 'stream_' + crypto.randomUUID().slice(0, 8), // generated per-session push name
  playingStreams: new Map(), // streams currently playing (NOTE: distinct from the `playingStreams` ref above)
  apiUrl: 'http://localhost:1985/api/v1/', // SRS HTTP API base
  checkInterval: null,  // NOTE(review): declared but never assigned in this file
  statsInterval: null,  // NOTE(review): declared but never assigned in this file
  // Rolling quality history (last 100 samples), filled by checkQualityAlerts.
  qualityHistory: {
    video: {
      bitrate: [],
      frameRate: []
    },
    audio: {
      bitrate: []
    }
  },
  // Target encoding parameters; bitrates are expressed in kbps.
  quality: {
    video: {
      width: 1280,
      height: 720,
      frameRate: 30,
      bitrate: 2500,
      minBitrate: 1000,
      maxBitrate: 4000
    },
    audio: {
      bitrate: 128,
      minBitrate: 64,
      maxBitrate: 192
    },
    // Alert thresholds used by checkQualityAlerts (kbps / fps).
    thresholds: {
      videoBitrateWarning: 1500,
      videoBitrateCritical: 1000,
      frameRateWarning: 20,
      frameRateCritical: 15,
      audioBitrateWarning: 96,
      audioBitrateCritical: 64
    }
  }
}

// Sample outbound RTP statistics from a peer connection.
//
// Bug fix: the original reported the *cumulative* `bytesSent` counter under
// the key `bitrate`, which callers then treated as a rate. We now keep the
// previous sample on the connection object and derive an actual bitrate in
// kbps from the byte delta. The first call has no baseline and reports 0.
//
// Returns { [kind]: { bitrate, packetsSent, framesEncoded, frameRate } }
// or null on failure.
async function getStreamStats(peerConnection) {
  try {
    const stats = await peerConnection.getStats()
    const now = Date.now()
    const prev = peerConnection._lastStatsSample || { time: now, bytes: {} }
    const bytesNow = {}
    const results = {}

    stats.forEach(report => {
      if (report.type === 'outbound-rtp') {
        bytesNow[report.kind] = report.bytesSent
        const elapsedSec = (now - prev.time) / 1000
        const prevBytes = prev.bytes[report.kind]
        const deltaBytes = prevBytes === undefined ? 0 : report.bytesSent - prevBytes
        results[report.kind] = {
          // kbps over the sampling interval; 0 until a baseline exists
          bitrate: elapsedSec > 0 ? Math.round((deltaBytes * 8) / elapsedSec / 1000) : 0,
          packetsSent: report.packetsSent,
          framesEncoded: report.framesEncoded,
          frameRate: report.framesPerSecond
        }
      }
    })

    peerConnection._lastStatsSample = { time: now, bytes: bytesNow }
    return results
  } catch (error) {
    console.error('获取推流统计数据失败:', error)
    return null
  }
}

// Adapt outgoing encoder bitrates from the most recent stats sample and feed
// the alert checker.
// NOTE(review): units in this function look inconsistent — `stats.*.bitrate`
// comes from getStreamStats (which reports the raw cumulative `bytesSent`
// counter), yet here it is clamped against `maxBitrate * 1000` (bps) and
// forwarded divided by 1024 to thresholds expressed in kbps. Confirm the
// intended units; a correct fix needs a coordinated rework with
// getStreamStats.
function adjustStreamQuality(peerConnection, stats) {
  try {
    // Adjust video quality.
    const videoSender = peerConnection.getSenders().find(s => s.track?.kind === 'video')
    if (videoSender && stats.video) {
      const parameters = videoSender.getParameters()
      if (!parameters.encodings) {
        parameters.encodings = [{}]
      }

      // Target: 1.2x the measured value, clamped to the configured range.
      let targetBitrate = Math.min(
        srsConfig.quality.video.maxBitrate * 1000,
        Math.max(
          srsConfig.quality.video.minBitrate * 1000,
          stats.video.bitrate * 1.2
        )
      )

      parameters.encodings[0].maxBitrate = targetBitrate
      // NOTE(review): setParameters returns a promise; rejections are ignored.
      videoSender.setParameters(parameters)

      // Check video quality alerts.
      checkQualityAlerts('video', {
        bitrate: stats.video.bitrate / 1024,
        frameRate: stats.video.frameRate
      })
    }

    // Adjust audio quality.
    const audioSender = peerConnection.getSenders().find(s => s.track?.kind === 'audio')
    if (audioSender && stats.audio) {
      const parameters = audioSender.getParameters()
      if (!parameters.encodings) {
        parameters.encodings = [{}]
      }

      // Target: 1.2x the measured value, clamped to the configured range.
      let targetBitrate = Math.min(
        srsConfig.quality.audio.maxBitrate * 1000,
        Math.max(
          srsConfig.quality.audio.minBitrate * 1000,
          stats.audio.bitrate * 1.2
        )
      )

      parameters.encodings[0].maxBitrate = targetBitrate
      audioSender.setParameters(parameters)

      // Check audio quality alerts.
      checkQualityAlerts('audio', {
        bitrate: stats.audio.bitrate / 1024
      })
    }
  } catch (error) {
    console.error('调整推流质量失败:', error)
  }
}

// Record quality history (last 100 samples) and surface warning/critical
// alerts in the status bar. Bitrates are in kbps, frame rate in fps.
function checkQualityAlerts(type, stats) {
  const t = srsConfig.quality.thresholds
  const history = srsConfig.qualityHistory

  if (type === 'video') {
    history.video.bitrate.push(stats.bitrate)
    history.video.frameRate.push(stats.frameRate)
    if (history.video.bitrate.length > 100) {
      history.video.bitrate.shift()
      history.video.frameRate.shift()
    }

    if (stats.bitrate < t.videoBitrateCritical) {
      errorMessage.value = `视频码率过低: ${stats.bitrate.toFixed(2)}kbps (临界值: ${t.videoBitrateCritical}kbps)`
    } else if (stats.bitrate < t.videoBitrateWarning) {
      errorMessage.value = `视频码率警告: ${stats.bitrate.toFixed(2)}kbps (警告值: ${t.videoBitrateWarning}kbps)`
    }

    if (stats.frameRate < t.frameRateCritical) {
      errorMessage.value = `帧率过低: ${stats.frameRate.toFixed(2)}fps (临界值: ${t.frameRateCritical}fps)`
    } else if (stats.frameRate < t.frameRateWarning) {
      errorMessage.value = `帧率警告: ${stats.frameRate.toFixed(2)}fps (警告值: ${t.frameRateWarning}fps)`
    }
    return
  }

  if (type === 'audio') {
    history.audio.bitrate.push(stats.bitrate)
    if (history.audio.bitrate.length > 100) {
      history.audio.bitrate.shift()
    }

    if (stats.bitrate < t.audioBitrateCritical) {
      errorMessage.value = `音频码率过低: ${stats.bitrate.toFixed(2)}kbps (临界值: ${t.audioBitrateCritical}kbps)`
    } else if (stats.bitrate < t.audioBitrateWarning) {
      errorMessage.value = `音频码率警告: ${stats.bitrate.toFixed(2)}kbps (警告值: ${t.audioBitrateWarning}kbps)`
    }
  }
}

// Ping the SRS HTTP API to confirm the server is reachable.
// Returns true on success, false (with errorMessage set) otherwise.
async function checkSrsStatus() {
  try {
    const response = await fetch(`${srsConfig.apiUrl}versions`)
    if (!response.ok) {
      throw new Error(`SRS API请求失败: ${response.status}`)
    }
    console.log('SRS服务器状态:', await response.json())
    return true
  } catch (error) {
    console.error('检查SRS服务器状态失败:', error)
    errorMessage.value = '无法连接SRS服务器，请检查服务器状态'
    return false
  }
}

// Pre-flight check before publishing: the SRS API must respond and both the
// WHIP and WHEP endpoints must answer an OPTIONS request.
// NOTE(review): this helper is not called anywhere in this file.
async function beforePushStream() {
  if (!await checkSrsStatus()) {
    return false
  }

  try {
    const endpoints = [
      ['WHIP', srsConfig.whipUrl],
      ['WHEP', srsConfig.whepUrl]
    ]
    for (const [label, url] of endpoints) {
      const res = await fetch(url, { method: 'OPTIONS' })
      if (!res.ok) {
        throw new Error(`${label} URL不可访问: ${res.status}`)
      }
    }
    return true
  } catch (error) {
    console.error('检查推流URL失败:', error)
    errorMessage.value = '推流URL不可访问，请检查配置'
    return false
  }
}

// Publish only the local video track to SRS.
async function pushVideoToSRS() {
  const videoTrack = localStream.value?.getVideoTracks()[0]
  if (!videoTrack) return

  await pushStreamToSRS(new MediaStream([videoTrack]))
}

// Publish only the local audio track to SRS.
async function pushAudioToSRS() {
  const audioTrack = localStream.value?.getAudioTracks()[0]
  if (!audioTrack) return

  await pushStreamToSRS(new MediaStream([audioTrack]))
}

// Publish both local audio and video tracks to SRS (requires both present).
async function pushAVToSRS() {
  const stream = localStream.value
  if (!stream) return

  const videoTrack = stream.getVideoTracks()[0]
  const audioTrack = stream.getAudioTracks()[0]
  if (!videoTrack || !audioTrack) return

  await pushStreamToSRS(new MediaStream([videoTrack, audioTrack]))
}

    // 通过WHIP协议推流到SRS
    async function pushStreamToSRS(stream) {
      try {
        // 检查是否有有效的音视频轨道
        const videoTracks = stream.getVideoTracks()
        const audioTracks = stream.getAudioTracks()

        if (videoTracks.length === 0 && audioTracks.length === 0) {
          throw new Error('未找到有效的音视频轨道')
        }

        // 验证视频流有效性
        if (videoTracks.length > 0) {
          const videoTrack = videoTracks[0]
          if (!videoTrack.enabled || videoTrack.muted) {
            throw new Error('视频轨道被禁用或静音')
          }

          // 添加视频轨道状态监听
          videoTrack.onmute = () => {
            throw new Error('视频轨道被静音')
          }
          videoTrack.onunmute = () => {
            console.log('视频轨道取消静音')
          }
          videoTrack.onended = () => {
            throw new Error('视频轨道已结束')
          }
        }

        const peerConnection = new RTCPeerConnection(configuration)
        // 视频流状态监控变量
        const stats = {
          timeout: null,
          hasValidFrames: false,
          lastFrameTime: null,
          startTime: Date.now()
        }

        // 添加媒体轨道并监听状态变化
        stream.getTracks().forEach(track => {
          const sender = peerConnection.addTrack(track, stream)

          // 监听轨道状态
          track.onended = () => {
            console.log('Track ended:', track.kind)
            stopPushStream()
          }

          track.onmute = () => {
            console.log('Track muted:', track.kind)
            throw new Error(`${track.kind} 轨道被静音`)
          }

          track.onunmute = () => {
            console.log('Track unmuted:', track.kind)
          }

          // 对于视频轨道，添加帧率检测
          if (track.kind === 'video') {
            stats.timeout = setInterval(async () => {
              try {
                const statsReport = await peerConnection.getStats(track)
                statsReport.forEach(report => {
                  if (report.type === 'outbound-rtp' &&
                      report.kind === 'video' &&
                      report.framesEncoded > 0) {
                    stats.hasValidFrames = true
                  }
                })

                // 如果10秒内没有有效帧，抛出错误
                if (!stats.hasValidFrames && Date.now() - stats.startTime > 10000) {
                  throw new Error('视频流无效：10秒内未检测到有效帧')
                }
              } catch (error) {
                console.error('视频流状态检测失败:', error)
                throw error
              }
            }, 1000)
          }
        })

        const startTime = Date.now()

        // 创建offer并设置编码参数
        const offer = await peerConnection.createOffer({
          offerToReceiveAudio: false,
          offerToReceiveVideo: false
        })

        // 设置连接超时
        const connectionTimeout = setTimeout(() => {
          throw new Error('推流连接超时')
        }, 10000)

        // 设置编码参数
        const senders = peerConnection.getSenders()
        senders.forEach(sender => {
          if (sender.track?.kind === 'video') {
            const parameters = sender.getParameters()
            if (!parameters.encodings) {
              parameters.encodings = [{}]
            }
            parameters.encodings[0].maxBitrate = 2 * 1000 * 1000 // 2 Mbps
            sender.setParameters(parameters)
          }
        })

        await peerConnection.setLocalDescription(offer)

        // 通过WHIP协议推流
        const response = await fetch(`${srsConfig.whipUrl}?app=live&stream=${srsConfig.streamName}`, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/sdp'
          },
          body: offer.sdp
        })

        // 清除连接超时
        clearTimeout(connectionTimeout)

        if (!response.ok) {
          const errorText = await response.text()
          throw new Error(`推流失败: ${response.statusText} - ${errorText}`)
        }

        // 验证初始帧
        const initialFrameTimeout = setTimeout(() => {
          if (!stats.hasValidFrames) {
            throw new Error('推流成功但未检测到有效视频帧')
          }
        }, 5000)

        const answerSDP = await response.text()
        await peerConnection.setRemoteDescription(new RTCSessionDescription({
          type: 'answer',
          sdp: answerSDP
        }))

        // 清除初始帧检测
        clearTimeout(initialFrameTimeout)

        // 添加流信息到playingStreams
        playingStreams.value.set(srsConfig.streamName, {
          peerConnection,
          stream,
          whipUrl: `${srsConfig.whipUrl}?app=live&stream=${srsConfig.streamName}`,
          whepUrl: `${srsConfig.whepUrl}?app=live&stream=${srsConfig.streamName}`,
          statsInterval: setInterval(async () => {
            try {
              const stats = await peerConnection.getStats()
              stats.forEach(report => {
                if (report.type === 'outbound-rtp') {
                  console.log(`${report.kind} stats:`, {
                    bitrate: report.bitrate,
                    packetsSent: report.packetsSent,
                    framesEncoded: report.framesEncoded,
                    frameRate: report.framesPerSecond
                  })

                  // 如果视频帧率持续为0超过5秒，抛出错误
                  if (report.kind === 'video' && report.framesEncoded === 0) {
                    const now = Date.now()
                    if (!stats.lastFrameTime) {
                      stats.lastFrameTime = now
                    } else if (now - stats.lastFrameTime > 5000) {
                      throw new Error('视频流异常：5秒内未检测到视频帧')
                    }
                  } else {
                    stats.lastFrameTime = null
                  }
                }
              })
            } catch (error) {
              console.error('获取推流统计失败:', error)
              throw error
            }
          }, 1000),
          cleanup: () => {
            clearInterval(statsTimeout)
            clearTimeout(connectionTimeout)
            clearTimeout(initialFrameTimeout)
          }
        })

        connectionStatus.value = '推流中...'
        console.log('推流成功:', {
          videoTracks: videoTracks.length,
          audioTracks: audioTracks.length,
          streamName: srsConfig.streamName
        })
  } catch (error) {
    errorMessage.value = '推流失败: ' + error.message
    console.error('推流失败:', error)
  }
}

    // 播放SRS视频流
    async function playVideoStream() {
      if (!streamName.value) {
        errorMessage.value = '请输入流名称'
        return
      }

      // 清理之前的播放器
      const existing = srsConfig.playingStreams.get(streamName.value)
      if (existing) {
        if (existing.videoElement) {
          existing.videoElement.remove()
        }
        if (existing.peerConnection) {
          existing.peerConnection.close()
        }
        srsConfig.playingStreams.delete(streamName.value)
      }

      try {
        const peerConnection = new RTCPeerConnection(configuration)

        // 处理远程视频流
        peerConnection.ontrack = event => {
          const remoteStream = event.streams[0]
          if (!remoteStream) {
            throw new Error('未收到视频流')
          }

          // 创建视频元素
          const videoContainer = document.querySelector('.video-container')
          const videoElement = document.createElement('video')
          videoElement.autoplay = true
          videoElement.controls = true
          videoElement.style.width = '100%'
          videoElement.style.maxWidth = '800px'
          videoElement.srcObject = remoteStream

          // 添加状态监听
          videoElement.onloadedmetadata = () => {
            console.log('视频流已加载，开始播放')
            connectionStatus.value = '正在播放视频流'
          }

          videoElement.onerror = (err) => {
            console.error('视频播放错误:', err)
            errorMessage.value = '视频播放失败'
          }

          // 添加轨道状态监听
          remoteStream.getTracks().forEach(track => {
            track.onended = () => {
              console.log('Track ended:', track.kind)
              stopPlayStream(streamName.value)
            }
            track.onmute = () => {
              console.log('Track muted:', track.kind)
              errorMessage.value = `${track.kind} 轨道被静音`
            }
            track.onunmute = () => {
              console.log('Track unmuted:', track.kind)
            }
          })

          // 添加到DOM
          videoContainer.innerHTML = '' // 清空之前的视频
          videoContainer.appendChild(videoElement)

          // 保存流信息
          srsConfig.playingStreams.set(streamName.value, {
            peerConnection,
            videoElement,
            stream: remoteStream,
            stats: {
              lastFrameTime: null,
              statsInterval: setInterval(async () => {
                try {
                  const stats = await peerConnection.getStats()
                  stats.forEach(report => {
                    if (report.type === 'inbound-rtp' && report.kind === 'video') {
                      // 检查视频帧率
                      if (report.framesDecoded === 0) {
                        const now = Date.now()
                        if (!srsConfig.playingStreams.get(streamName.value).stats.lastFrameTime) {
                          srsConfig.playingStreams.get(streamName.value).stats.lastFrameTime = now
                        } else if (now - srsConfig.playingStreams.get(streamName.value).stats.lastFrameTime > 5000) {
                          throw new Error('视频流异常：5秒内未检测到视频帧')
                        }
                      } else {
                        srsConfig.playingStreams.get(streamName.value).stats.lastFrameTime = null
                      }
                    }
                  })
                } catch (error) {
                  console.error('获取播放统计失败:', error)
                  throw error
                }
              }, 1000)
            }
          })
        }

        // ICE Candidate处理
        peerConnection.onicecandidate = event => {
          if (event.candidate) {
            console.log('收到ICE candidate:', event.candidate)
          }
        }

        // 创建offer
        const offer = await peerConnection.createOffer({
          offerToReceiveVideo: true,
          offerToReceiveAudio: true
        })
        await peerConnection.setLocalDescription(offer)

        // 通过WHEP协议播放流
        const response = await fetch(`${srsConfig.whepUrl}?app=live&stream=${streamName.value}`, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/sdp'
          },
          body: offer.sdp
        })

        if (!response.ok) {
          const errorText = await response.text()
          throw new Error(`播放失败: ${response.statusText} - ${errorText}`)
        }

        const answerSDP = await response.text()
        await peerConnection.setRemoteDescription(new RTCSessionDescription({
          type: 'answer',
          sdp: answerSDP
        }))

        console.log('视频流播放成功:', {
          streamName: streamName.value,
          whepUrl: `${srsConfig.whepUrl}?app=live&stream=${streamName.value}`
        })
      } catch (error) {
        console.error('播放视频流失败:', error)
        errorMessage.value = '播放视频流失败: ' + error.message
        connectionStatus.value = '播放失败'
      }
    }

    // 播放SRS音频流
    async function playAudioStream() {
      if (!streamName.value) return

      try {
        const peerConnection = new RTCPeerConnection(configuration)

        // 处理远程音频流
        peerConnection.ontrack = event => {
          const remoteStream = event.streams[0]
          const audioElement = document.createElement('audio')
          audioElement.autoplay = true
          audioElement.srcObject = remoteStream
          document.body.appendChild(audioElement)

          // 保存流信息
          srsConfig.playingStreams.set(streamName.value, {
            peerConnection,
            audioElement
          })
        }

        // 创建offer
        const offer = await peerConnection.createOffer({
          offerToReceiveVideo: false,
          offerToReceiveAudio: true
        })
        await peerConnection.setLocalDescription(offer)

        // 直接通过WHEP协议ya流
        const response = await fetch(`${srsConfig.whepUrl}?app=live&stream=${streamName.value}`, {
          method: 'POST',
          headers: {
            'Content-Type': 'application/sdp'
          },
          body: offer.sdp
        })

        if (response.ok) {
          const answerSDP = await response.text()
          await peerConnection.setRemoteDescription(new RTCSessionDescription({
            type: 'answer',
            sdp: answerSDP
          }))
        } else {
          throw new Error(`播放音频流失败: ${response.statusText}`)
        }
      } catch (error) {
        console.error('播放音频流失败:', error)
        errorMessage.value = '播放音频流失败: ' + error.message
      }
    }

// Stop the current WHIP push: close the local connection, stop its timers
// and media tracks, then notify the signaling server.
//
// Fixes: the original only sent a 'stop' message — and did so over a freshly
// created WebSocket instead of the already-open `ws` — leaving the local
// RTCPeerConnection, its stats interval and the media tracks running.
async function stopPushStream() {
  try {
    const info = playingStreams.value.get(srsConfig.streamName)
    if (info) {
      if (info.statsInterval) clearInterval(info.statsInterval)
      if (typeof info.cleanup === 'function') {
        // cleanup may come from older entries; never let it abort teardown.
        try { info.cleanup() } catch (e) { console.error('清理推流资源失败:', e) }
      }
      info.peerConnection?.close()
      info.stream?.getTracks().forEach(track => track.stop())
      playingStreams.value.delete(srsConfig.streamName)
    }

    // Reuse the open signaling socket when available.
    const socket = ws.value || await initWebSocket()
    socket.send(JSON.stringify({
      type: 'stop',
      streamId: srsConfig.streamName
    }))
    connectionStatus.value = '推流已停止'
  } catch (error) {
    console.error('停止推流失败:', error)
  }
}

// Stop playing a stream: clear its stats watchdog, close the connection and
// remove its media element.
//
// Fix: the original never cleared the stats interval stored by
// playVideoStream, so the watchdog kept polling a closed connection forever.
async function stopPlayStream(streamName) {
  try {
    const streamInfo = srsConfig.playingStreams.get(streamName)
    if (!streamInfo) return

    // Stop any polling timers attached to this player.
    if (streamInfo.stats?.statsInterval) {
      clearInterval(streamInfo.stats.statsInterval)
    }
    if (streamInfo.statsInterval) {
      clearInterval(streamInfo.statsInterval)
    }
    // Close the peer connection.
    streamInfo.peerConnection?.close()
    // Remove the video/audio element.
    streamInfo.videoElement?.remove()
    streamInfo.audioElement?.remove()
    // Drop the bookkeeping entry.
    srsConfig.playingStreams.delete(streamName)
  } catch (error) {
    console.error('停止播放流失败:', error)
  }
}

// Sequentially start playback for a list of audio stream names.
// NOTE(review): in the current code playAudioStream ignores its argument and
// always plays the stream named in the input field — confirm the intent.
async function playMultipleAudioStreams(streamNames) {
  try {
    for (const streamId of streamNames) {
      await playAudioStream(streamId)
    }
  } catch (error) {
    console.error('播放多个音频流失败:', error)
  }
}

// Component teardown: stop played streams, pushed streams, the call and the
// signaling socket.
//
// Fix: the original only cleaned srsConfig.playingStreams — streams pushed
// via WHIP are tracked in the `playingStreams` ref and were left running
// (open connections and leaked stats intervals).
onBeforeUnmount(() => {
  // Stop every WHEP player.
  srsConfig.playingStreams.forEach((_, name) => {
    stopPlayStream(name)
  })

  // Tear down pushed (WHIP) streams tracked in the reactive map.
  playingStreams.value.forEach(info => {
    if (info.statsInterval) clearInterval(info.statsInterval)
    info.peerConnection?.close()
  })
  playingStreams.value.clear()

  endCall()

  if (ws.value) {
    ws.value.close()
    ws.value = null
  }
})

// Kick off the signaling connection as soon as the component mounts.
onMounted(initWebSocketConnection)
</script>

<style scoped>
/* Root layout: full-height column (status bar / videos / controls). */
.webrtc-container {
  display: flex;
  flex-direction: column;
  height: 100%;
}

/* Video grid: wraps tiles, takes the remaining vertical space. */
.video-container {
  display: flex;
  flex-wrap: wrap;
  gap: 10px;
  flex: 1;
}

video {
  width: 300px;
  height: 200px;
  background: #000;
  border: 1px solid #ccc;
}

/* Call control row. */
.controls {
  display: flex;
  gap: 10px;
  padding: 10px;
  justify-content: center;
}

select, button {
  padding: 8px 16px;
  background: #409eff;
  color: white;
  border: none;
  border-radius: 4px;
  cursor: pointer;
}

/* Selects get a neutral look (overrides the shared rule above). */
select {
  background: #fff;
  color: #333;
  border: 1px solid #ccc;
}

button:hover {
  background: #66b1ff;
}

/* Shown while the signaling socket is still connecting (v-else branch). */
.loading {
  display: flex;
  justify-content: center;
  align-items: center;
  height: 100%;
  font-size: 1.2em;
}

.error-message {
  color: red;
  padding: 10px;
  text-align: center;
  background: #ffe6e6;
  border: 1px solid #ffcccc;
  border-radius: 4px;
  margin: 10px;
}

.participant-controls {
  display: flex;
  gap: 5px;
  margin-top: 5px;
}

/* NOTE(review): no element in the template uses the .play-controls class —
   the following five rules appear to be dead code duplicating .srs-controls. */
.play-controls {
  margin-top: 20px;
  display: flex;
  flex-direction: column;
  gap: 10px;
  padding: 15px;
  background: #f5f7fa;
  border-radius: 8px;
}

.play-controls .control-group {
  display: flex;
  gap: 10px;
  align-items: center;
}

.play-controls input {
  padding: 8px 12px;
  border: 1px solid #dcdfe6;
  border-radius: 4px;
  flex: 1;
}

.play-controls button {
  padding: 8px 16px;
  background: #409eff;
  color: white;
  border: none;
  border-radius: 4px;
  cursor: pointer;
}

.play-controls button:hover {
  background: #66b1ff;
}

.play-controls button.stop {
  background: #f56c6c;
}

.play-controls button.stop:hover {
  background: #f78989;
}

/* SRS push/play panel. */
.srs-controls {
  margin-top: 20px;
  padding: 15px;
  background: #f5f7fa;
  border-radius: 8px;
}

.srs-controls .control-group {
  display: flex;
  gap: 10px;
  align-items: center;
}

.srs-controls input {
  padding: 8px 12px;
  border: 1px solid #dcdfe6;
  border-radius: 4px;
  flex: 1;
}

.srs-controls button {
  padding: 8px 16px;
  background: #409eff;
  color: white;
  border: none;
  border-radius: 4px;
  cursor: pointer;
}

/* Red styling for the "stop push" button. */
.srs-controls button.stop {
  background: #f56c6c;
}

.srs-controls button.stop:hover {
  background: #f78989;
}

/* WHIP/WHEP URL list shown under the SRS controls. */
.stream-urls {
  margin-top: 15px;
}

.stream-url {
  margin-top: 10px;
  padding: 10px;
  background: #fff;
  border: 1px solid #ebeef5;
  border-radius: 4px;
  font-size: 0.9em;
  color: #666;
}
</style>
