<template>
  <!-- Video area -->
  <div class="main-video">
    <div id="video-placeholder" :class="gridStyleClass">
      <!-- Room participant count display -->
      <div class="room-participants-display">
        <el-icon class="participants-icon"><User /></el-icon>
        <span class="participants-text"
          >{{ userList.length }}/{{ roomStore.currentRoom.capacity }}</span
        >
      </div>
      <!-- One video container per room member -->
      <div
        v-for="user in userList"
        :key="user.userId"
        :id="user.userId + '-container'"
        class="remote-video-container"
      >
        <video
          v-show="user.isStreaming"
          :ref="(el) => setVideoElement(el, user.userId)"
          :srcObject="user.videoStream"
          autoplay
          class="remote-video"
        ></video>
        <div v-if="!user.isStreaming" class="remote-video no-stream">
          <el-avatar :size="80" :src="user.avatarUrl">
            {{ user.nickname.charAt(0) }}
          </el-avatar>
          <p>{{ user.nickname }}</p>
        </div>
        <!-- Video and audio status indicators -->
        <div class="video-overlay">
          <div
            class="video-status-overlay"
            :class="{ 'video-active': user.isStreaming !== 0 }"
          >
            <el-icon v-if="user.isStreaming === 1" class="overlay-icon"
              ><Camera
            /></el-icon>
            <el-icon v-else-if="user.isStreaming === 2" class="overlay-icon"
              ><Monitor
            /></el-icon>
            <el-icon v-else class="overlay-icon"><Headset /></el-icon>
          </div>
          <div
            class="audio-status-overlay"
            :class="{ 'audio-active': !user.isMuted }"
            :style="{ backgroundColor: getAudioLevelColor(user) }"
          >
            <el-icon v-if="user.isMuted" class="overlay-icon"><Mute /></el-icon>
            <el-icon v-else class="overlay-icon"><Microphone /></el-icon>
          </div>
        </div>
      </div>
    </div>
    <!-- Controls and member list -->
    <div class="append-video">
      <!-- Member list -->
      <transition name="list-fade" mode="out-in">
        <div v-show="isMemberListVisible" class="participants-videos">
          <div
            v-for="user in userList"
            :key="user.userId"
            class="participant-video"
            @click.stop="focusOnUser(user.userId)"
          >
            <div class="video-frame">
              <el-avatar :size="60" :src="user.avatarUrl">
                {{ user.nickname.charAt(0) }}
              </el-avatar>
              <!-- Video status icon -->
              <div
                class="video-status"
                :class="{ 'video-active': user.isStreaming !== 0 }"
              >
                <el-icon v-if="user.isStreaming === 1" class="video-icon"
                  ><Camera
                /></el-icon>
                <el-icon v-else-if="user.isStreaming === 2" class="video-icon"
                  ><Monitor
                /></el-icon>
                <el-icon v-else class="video-icon"><Headset /></el-icon>
              </div>
              <!-- Audio status icon -->
              <div
                class="audio-status"
                :class="{ 'audio-active': !user.isMuted }"
                :style="{ backgroundColor: getAudioLevelColor(user) }"
              >
                <el-icon v-if="user.isMuted" class="audio-icon"
                  ><Mute
                /></el-icon>
                <el-icon v-else class="audio-icon"><Microphone /></el-icon>
              </div>
            </div>
            <div class="participant-info">
              <span class="participant-name">{{ user.nickname }}</span>
              <el-tag :type="getStatusType(user.status)" size="small">
                {{ getStatusText(user.status) }}
              </el-tag>
            </div>
          </div>
        </div>
      </transition>
      <!-- Controls area -->
      <div class="video-controls">
        <div>
          <el-row>
            <el-col :span="24" class="text-center">
              <!-- Camera toggle -->
              <el-button
                v-if="cameraSwitch"
                @click.stop="toggleCamera()"
                title="关闭摄像头"
                icon="VideoCamera"
                type="success"
                circle
              ></el-button>
              <el-button
                v-else
                @click.stop="toggleCamera()"
                title="开启摄像头"
                icon="VideoCamera"
                type="primary"
                circle
              ></el-button>
              <!-- Microphone toggle -->
              <el-button
                v-if="micSwitch"
                @click.stop="toggleMic()"
                title="关闭麦克风"
                icon="Microphone"
                type="success"
                circle
              ></el-button>
              <el-button
                v-else
                @click.stop="toggleMic()"
                title="开启麦克风"
                icon="Microphone"
                type="primary"
                circle
              ></el-button>
              <!-- Screen-share toggle -->
              <el-button
                v-if="shareSwitch"
                @click.stop="toggleShare()"
                title="停止投屏"
                icon="Monitor"
                type="success"
                circle
              ></el-button>
              <el-button
                v-else
                @click.stop="toggleShare()"
                title="开启投屏"
                icon="Monitor"
                type="primary"
                circle
              ></el-button>
              <!-- Member list toggle -->
              <el-button
                v-if="isMemberListVisible"
                @click.stop="userListBtn"
                title="查看成员"
                icon="User"
                type="success"
                circle
              ></el-button>
              <el-button
                v-else
                @click.stop="userListBtn"
                title="查看成员"
                icon="User"
                type="primary"
                circle
              ></el-button>
              <!-- Layout toggle -->
              <el-button
                @click.stop="toggleGrid()"
                title="切换视图"
                icon="Grid"
                type="primary"
                circle
              ></el-button>
              <!-- Invite button -->
              <el-button
                @click.stop="inviteBtn()"
                title="发送邀请"
                icon="Share"
                type="success"
                circle
              ></el-button>
            </el-col>
          </el-row>
        </div>
        <el-button
          @click.stop="handleExitRoom()"
          title="退出自习室"
          icon="Close"
          type="danger"
          round
          style="z-index: 1"
          >结束</el-button
        >
      </div>
    </div>
  </div>
</template>
<script setup lang="ts">
// Imports
// Fix: removed `import { use } from "react"` — this is a Vue component; the
// React import was never used and "react" is not a dependency of this app,
// so the stray line (likely an editor auto-import) breaks the build.
import {
  ref,
  onMounted,
  nextTick,
  computed,
  Transition,
  onUpdated,
  onUnmounted,
} from "vue";
import { useRouter } from "vue-router";
import { ElMessage, ElMessageBox } from "element-plus";
import { useStudyRoomStore } from "@/stores/modules/studyroom";
import { useUserStore } from "@/stores/modules/user";
import { Client } from "@stomp/stompjs";
import { initRoom } from "@/api/room";
import { getUserInfo } from "@/api/user";
// On mount: resolve the current user/room ids from the stores, seed the
// shared signaling payload, then start the join flow.
onMounted(() => {
  userId.value = userStore.currentUser.userId;
  roomId.value = roomStore.currentRoom.roomId;
  paramObj.value.roomId = roomId.value;
  paramObj.value.fromUserId = userId.value;
  console.log(`info: ${userId.value} 进入房间 ${roomId.value}`);
  handleEnterRoom();
});
// Tear down connections and leave the room when the component unmounts.
onUnmounted(() => {
  handleExitRoom();
});
// Template ref callback: remember each member's <video> element on their
// userMap entry so the stream can be attached later.
const setVideoElement = (el, userId) => {
  if (!el) return;
  const entry = userMap.value.get(userId);
  if (entry) {
    entry.videoElement = el;
  }
};
const router = useRouter();
// Stores holding the current user and room information.
const roomStore = useStudyRoomStore();
const userStore = useUserStore();
const roomId = ref(null); // filled in onMounted from the room store
const userId = ref(null); // filled in onMounted from the user store
// UI state
const isMemberListVisible = ref(false); // whether the member list is shown
const cameraSwitch = ref(false); // camera on/off
const micSwitch = ref(false); // microphone on/off
const shareSwitch = ref(false); // screen sharing on/off
const gridStyle = ref(0); // video layout: 0 single, 1 two-up, 2 nine-grid
// CSS class for the currently selected video grid layout; any unexpected
// value falls back to the single-row layout.
const gridStyleClass = computed(() => {
  const layoutClasses = ["single-row", "double-row", "triple-row"];
  return layoutClasses[gridStyle.value] ?? "single-row";
});
// WebRTC configuration
// ICE / STUN / TURN server configuration
const RTCConfiguration: RTCConfiguration = {
  iceServers: [
    {
      urls: ["stun:turn.neptech.net:3478"], // default STUN server
    },
    {
      // NOTE(review): the original comment called this a STUN server, but the
      // turns: scheme plus credentials make it a TURN-over-TLS server.
      urls: ["turns:turn.neptech.net:5349"],
      username: "neptech",
      credential: "neptech.net",
    },
  ],
  // Enabling this forces relay-only (TURN) transport
  // iceTransportPolicy: "relay",
};
const localVideo = ref(null); // local <video> element ref (template use)
let MQClient = new Client(); // STOMP message-queue client
// Shared payload for signaling messages; roomId/fromUserId are null here and
// are re-filled in onMounted once the stores have been read.
const paramObj = ref({
  type: "",
  roomId: roomId.value,
  data: "",
  fromUserId: userId.value,
  toUserId: "",
});
// Clear the per-message fields of the shared signaling payload.
const resetMsgParam = () => {
  const msg = paramObj.value;
  msg.toUserId = "";
  msg.type = "";
  msg.data = "";
};

let heartBeatInterval = null;
const sendHeartBeat = () => {
  resetMsgParam();
  paramObj.value.type = "heartbeat";
  heartBeatInterval = setInterval(() => {
    console.log("发送心跳包");
    MQClient.publish({
      destination: `/exchange/room_exchange/sdp`,
      body: JSON.stringify(paramObj.value),
    });
  }, 30000);
};
// Stop the heartbeat publisher, if one is running.
// Fix: null out the handle after clearing so a later sendHeartBeat() (or a
// redundant stop) sees a consistent "not running" state.
const stopHeartBeat = () => {
  if (heartBeatInterval) {
    clearInterval(heartBeatInterval);
    heartBeatInterval = null;
  }
};
// Send a directed signaling message to one user in the room.
const sendSdpMessage = (type, data, to) => {
  resetMsgParam();
  const msg = paramObj.value;
  msg.toUserId = to;
  msg.type = type;
  msg.data = data;
  MQClient.publish({
    destination: `/exchange/room_exchange/sdp.${roomId.value}.${to}`,
    body: JSON.stringify(msg),
  });
};
// Broadcast a signaling message to the whole room.
const bdcSdpMessage = (type) => {
  resetMsgParam();
  const msg = paramObj.value;
  msg.type = type;
  MQClient.publish({
    destination: `/exchange/room_exchange/sdp.${roomId.value}`,
    body: JSON.stringify(msg),
  });
};

let localStream; // local media stream (camera + microphone)
// Member entry shape for the room's user map (declared below).
// One room member, including their WebRTC connection and UI state.
// Fixes: use the primitive `boolean` type instead of the `Boolean` wrapper
// object type, and declare `audioAnimationFrameId`, which startAudioAnalysis
// reads/writes but was missing from this interface.
interface User {
  userId: string;
  avatarUrl: string;
  nickname: string;
  status: string; // user status (studying/focused/resting/offline)
  peerConnection: RTCPeerConnection;
  candidateInfo: Array<any>; // queued remote ICE candidates
  isProcessingCandidate: boolean; // candidate queue currently draining
  videoStream: MediaStream;
  videoElement: HTMLVideoElement;
  isMuted: boolean;
  isStreaming: number; // 0 = no video, 1 = camera, 2 = screen share
  audioLevel?: number; // normalized mic level (0-1)
  audioContext?: AudioContext; // audio analysis context
  analyser?: AnalyserNode; // analyser node
  microphone?: MediaStreamAudioSourceNode; // mic source node
  audioAnimationFrameId?: number; // rAF handle for the level-meter loop
}
const userMap = ref(new Map<String, User>()); // all room members keyed by userId
const userList = computed(() => Array.from(userMap.value.values())); // array view for the template

// Camera toggle (the mic/share toggles follow the same pattern; only this one
// is commented in detail).
const toggleCamera = () => {
  // Flip the switch first, then act on the new state.
  cameraSwitch.value = !cameraSwitch.value;
  const nowOn = cameraSwitch.value;
  if (nowOn) {
    cameraOn();
    bdcSdpMessage("camOn");
  } else {
    cameraOff();
    bdcSdpMessage("camOff");
  }
  // Alternative: pause pushing frames without releasing the camera:
  // localStream.getVideoTracks()[0].enabled = cameraSwitch.value;
};
// Replace the outgoing track of the given kind ("video" | "audio") on every
// peer connection with the first matching track from `stream`.
// Fixes: the original dereferenced `sender.track.kind` (crashes when a sender
// has a null track) and called `.replaceTrack()` on the possibly-undefined
// result of `.find()`; it also silently did nothing useful for other `type`
// values — now guarded explicitly.
const pcTrackChange = (stream: MediaStream, type: string) => {
  if (type !== "video" && type !== "audio") return;
  if (type == "audio") console.log("执行了轨道切换");
  const newTrack =
    type === "video" ? stream.getVideoTracks()[0] : stream.getAudioTracks()[0];
  if (!newTrack) return; // no replacement track available
  userMap.value.forEach((user) => {
    if (!user.peerConnection) return;
    const sender = user.peerConnection
      .getSenders()
      .find((s) => s.track && s.track.kind === newTrack.kind);
    if (sender) {
      sender.replaceTrack(newTrack);
    }
  });
};
// Turn the camera on and start pushing its video track to all peers.
const cameraOn = async () => {
  // Camera and screen share are mutually exclusive; stop sharing first.
  if (shareSwitch.value) {
    shareSwitch.value = !shareSwitch.value;
    shareOff();
  }
  // Acquire a fresh camera stream.
  // NOTE(review): getUserMedia rejects on permission denial and there is no
  // catch here — confirm whether callers are expected to handle that.
  const videoElement = await navigator.mediaDevices.getUserMedia({
    video: true,
  });
  // Swap the video track sent to remote peers.
  pcTrackChange(videoElement, "video");
  // Swap the track shown in our own tile.
  // NOTE(review): assumes localStream still holds the original (stopped)
  // video track acquired in initVideoStream — TODO confirm.
  const user = userMap.value.get(userId.value);
  user.videoStream.removeTrack(localStream.getVideoTracks()[0]);
  user.videoStream.addTrack(videoElement.getVideoTracks()[0]);
  user.isStreaming = 1;
};
// Turn the camera off by stopping every video track in our local stream.
const cameraOff = () => {
  const me = userMap.value.get(userId.value);
  for (const track of me.videoStream.getVideoTracks()) {
    track.stop();
  }
  me.isStreaming = 0;
};
// Microphone toggle: flip the switch, then apply and broadcast the new state.
const toggleMic = () => {
  micSwitch.value = !micSwitch.value;
  const nowOn = micSwitch.value;
  if (nowOn) {
    micOn();
    bdcSdpMessage("unMuted");
  } else {
    micOff();
    bdcSdpMessage("muted");
  }
};
// Turn the microphone on and start pushing audio to all peers.
const micOn = async () => {
  // NOTE(review): getUserMedia rejects on permission denial; no catch here.
  const audioStream = await navigator.mediaDevices.getUserMedia({
    audio: true,
  });
  pcTrackChange(audioStream, "audio");
  // Swap the (stopped) original audio track for the fresh one locally.
  const user = userMap.value.get(userId.value);
  user.videoStream.removeTrack(localStream.getAudioTracks()[0]);
  user.videoStream.addTrack(audioStream.getAudioTracks()[0]);
  user.isMuted = false;
  startAudioAnalysis(user);
};
// Mute: stop every audio track in our local stream.
const micOff = () => {
  const me = userMap.value.get(userId.value);
  for (const track of me.videoStream.getAudioTracks()) {
    track.stop();
  }
  me.isMuted = true;
};
// Screen-share toggle: flip the switch, then apply and broadcast the state.
const toggleShare = () => {
  shareSwitch.value = !shareSwitch.value;
  const nowOn = shareSwitch.value;
  if (nowOn) {
    shareOn();
    bdcSdpMessage("shareOn");
  } else {
    shareOff();
    bdcSdpMessage("shareOff");
  }
};
// Start screen sharing and push the capture to all peers.
const shareOn = async () => {
  // Camera and screen share are mutually exclusive; stop the camera first.
  if (cameraSwitch.value) {
    cameraSwitch.value = !cameraSwitch.value;
    cameraOff();
  }
  // NOTE(review): getDisplayMedia rejects if the user cancels the picker;
  // there is no catch here.
  const shareStream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
  });
  pcTrackChange(shareStream, "video");
  const user = userMap.value.get(userId.value);
  user.videoStream.removeTrack(localStream.getVideoTracks()[0]);
  user.videoStream.addTrack(shareStream.getVideoTracks()[0]);
  user.isStreaming = 2;
};
// Stop screen sharing.
// NOTE(review): the original comment said the camera replaces the pushed
// media here, but no swap happens in this function — cameraOn() does that.
const shareOff = async () => {
  // Stop every local track whose kind matches the original video track's kind.
  userMap.value
    .get(userId.value)
    .videoStream.getTracks()
    .forEach((track) => {
      if (track.kind === localStream.getVideoTracks()[0].kind) {
        track.stop();
      }
    });
  userMap.value.get(userId.value).isStreaming = 0;
};
// Show/hide the member list strip.
const userListBtn = () => {
  const visible = isMemberListVisible.value;
  isMemberListVisible.value = !visible;
};
// Cycle through the three grid layouts (0 -> 1 -> 2 -> 0).
const toggleGrid = () => {
  const next = gridStyle.value + 1;
  gridStyle.value = next > 2 ? 0 : next;
};

// Invite button: build a join link for the current room and copy it to the
// clipboard, with a manual-copy dialog as fallback.
const inviteBtn = () => {
  const currentRoom = roomStore.currentRoom;
  if (!currentRoom) {
    ElMessage.error("无法获取当前房间信息");
    return;
  }

  const roomId = currentRoom.roomId;
  const password = currentRoom.password || "";

  // Password-protected rooms embed the password in the query string.
  const inviteUrl = password
    ? `${window.location.origin}/study-room?roomId=${roomId}&password=${password}`
    : `${window.location.origin}/study-room/${roomId}`;

  navigator.clipboard
    .writeText(inviteUrl)
    .then(() => {
      ElMessage.success("邀请链接已复制到剪切板");
    })
    .catch((err) => {
      console.error("复制失败:", err);
      ElMessage.error("复制链接失败，请手动复制");
      // Fallback: show the link so the user can copy it manually.
      ElMessageBox.alert(inviteUrl, "邀请链接", {
        confirmButtonText: "确定",
        type: "info",
      });
    });
};

// Enter-room flow: seed our own member entry, then (after the user
// acknowledges the permissions prompt) initialize the local media stream.
const handleEnterRoom = () => {
  // Initialize the member map with our own nickname and avatar
  userMap.value.set(userId.value, {
    userId: userId.value,
    nickname: userStore.currentUser.nickname + " (我)",
    avatarUrl: userStore.currentUser.avatarUrl,
    status: "studying",
    isMuted: true,
    isProcessingCandidate: false,
    peerConnection: null,
    candidateInfo: [],
    videoStream: null,
    isStreaming: 0,
    videoElement: null,
  });
  ElMessageBox.alert(
    "将开始检测摄像头与麦克风状态，如果您看到浏览器提示，请您点击“允许”",
    "提示",
    {
      confirmButtonText: "知道了",
      callback: () => {
        initVideoStream();
      },
    }
  );
};
// Leave the room: close every peer connection, announce the exit, stop the
// heartbeat, disconnect from the MQ, and navigate away.
// Fix: the original ran the broadcast/heartbeat-stop/deactivate/navigation
// INSIDE the per-user forEach, so they executed once per member — publishing
// after the client was already deactivated and pushing the route repeatedly.
// Per-user cleanup now stays in the loop; one-time teardown runs once after.
const handleExitRoom = () => {
  userList.value.forEach((user) => {
    user.videoStream = null;
    if (user.peerConnection) {
      user.peerConnection.close();
    }
  });
  bdcSdpMessage("exit"); // announce before disconnecting
  stopHeartBeat();
  MQClient.deactivate();
  roomStore.leaveRoom();
  router.push("/study-room");
};
// Initialize the local audio/video stream, then start SDP signaling.
// Fix: getUserMedia REJECTS on permission denial / unsupported browsers — it
// never resolves to undefined — so the original `localStream == undefined`
// check could not fire and the rejection escaped as an unhandled error,
// skipping initSDP(). Use try/catch instead; initSDP() still runs either way,
// matching the original intent.
const initVideoStream = async () => {
  try {
    localStream = await navigator.mediaDevices.getUserMedia({
      audio: true,
      video: true,
    });
    // TODO: apply the user's default AV on/off preferences here.
    const user = userMap.value.get(userId.value);
    user.videoStream = localStream;
    // Devices stay off until toggled on: stop the freshly acquired tracks.
    localStream.getTracks().forEach((track) => track.stop());
  } catch (error) {
    console.error("error: getUserMedia failed:", error);
    ElMessageBox.alert(
      "您的浏览器不支持，或您拒绝了媒体权限。您将无法使用完整功能。如需开启权限，请尝试清除浏览器缓存后刷新页面",
      "提示",
      {
        confirmButtonText: "知道了",
      }
    );
  }
  // Start SDP signaling regardless of media availability.
  initSDP();
};
// Ask the backend to initialize signaling for this room, then connect to the
// message queue; on failure, send the user back to the room list.
const initSDP = async () => {
  const payload = {
    type: "init",
    fromUserId: userId.value,
    roomId: roomId.value,
  };
  try {
    await initRoom(payload);
    console.log(`info: room initialized`);
    initMQ();
  } catch (error) {
    console.error(`info: room initialize error: ${error}`);
    ElMessageBox.confirm("房间初始化失败，请稍后重试", "提示", {
      confirmButtonText: "确定",
      type: "error",
    }).then(() => {
      router.push("/study-room");
    });
  }
};
// Activate the MQ client; subscriptions and handlers are bound in the
// configure() call below once the connection is established.
const initMQ = () => {
  console.log("info: linking to message server...");
  MQClient.activate();
};
// Configure the MQ (STOMP over WebSocket) client
MQClient.configure({
  brokerURL: `${import.meta.env.VITE_MQ_WEBSOCKET_URL}`,
  connectHeaders: {
    login: `${import.meta.env.VITE_MQ_USERNAME}`,
    passcode: `${import.meta.env.VITE_MQ_PASSWORD}`,
    host: `${import.meta.env.VITE_MQ_HOST}`,
  },
  onConnect: () => {
    console.log("info: linked");
    // Bind exchange + routing keys: one room-wide broadcast subscription…
    MQClient.subscribe(
      `/exchange/room_exchange/*.${roomId.value}`,
      onBdcMessage
    );
    // …and one addressed to this user only.
    MQClient.subscribe(
      `/exchange/room_exchange/*.${roomId.value}.${userId.value}`,
      onMessage
    );
    // Announce ourselves once connected
    bdcSdpMessage("join");

    // Publish a heartbeat every 30 seconds
    sendHeartBeat();
  },
  onDisconnect: (err) => {
    console.log("warn: message server disconnected");
  },
  onStompError: (frame) => {
    console.error("error: broker: " + frame.headers["message"]);
    console.error("error: details: " + frame.body);
  },
  // NOTE(review): when webSocketFactory is provided it is used instead of
  // brokerURL; both point at the same env URL here.
  webSocketFactory: () => {
    return new WebSocket(`${import.meta.env.VITE_MQ_WEBSOCKET_URL}`);
  },
});
// Broadcast dispatcher: route chat messages to the chat handler, ignore our
// own echoes, and treat everything else as signaling.
const onBdcMessage = (message: any) => {
  const parsed = JSON.parse(message.body);
  if (parsed.type === "chat") {
    handleChatMessage(parsed);
    return;
  }
  if (parsed.fromUserId == userId.value) return; // our own broadcast
  handleSdpMessage(parsed);
};
// Direct (user-addressed) messages are always signaling messages.
const onMessage = (message: any) => {
  handleSdpMessage(JSON.parse(message.body));
};
// Dispatch a parsed signaling message by type.
// Fix: the mute/camera/share status branches dereferenced
// userMap.get(fromUserId) without checking it exists, crashing when a status
// message arrives for a user we are not (or no longer) tracking. Also fixed
// the "droped" typo in the fallthrough log.
const handleSdpMessage = (message) => {
  console.log(`info: analyzing SDP message...type: ${message.type}`);
  switch (message.type) {
    case "join":
      joinHandle(message.fromUserId);
      return;
    case "offer":
      offerHandle(message.fromUserId, message.data);
      return;
    case "answer":
      answerHandle(message.fromUserId, message.data);
      return;
    case "candidate":
      candidateHandle(message.fromUserId, message.data);
      return;
  }
  // Remaining types are per-user status updates and need a known sender.
  const user = userMap.value.get(message.fromUserId);
  if (!user) {
    console.log(`info: message from unknown user dropped: ${message.type}`);
    return;
  }
  switch (message.type) {
    case "muted":
      user.isMuted = true;
      break;
    case "unMuted":
      user.isMuted = false;
      startAudioAnalysis(user);
      break;
    case "camOn":
      user.isStreaming = 1;
      break;
    case "shareOn":
      user.isStreaming = 2;
      break;
    case "camOff":
    case "shareOff":
      user.isStreaming = 0;
      break;
    default:
      console.log(`info: message dropped: ${message.type}`);
  }
};
// Chat messages are currently only logged (no chat UI in this view yet).
const handleChatMessage = (message) => {
  console.log(`info: received chat message from ${message.fromUserId}`);
};
// Join handler: members already in the room act as the offer side toward the
// newcomer — build a connection, load their profile, then send an offer.
async function joinHandle(fromUserId) {
  console.log(`info: user: ${fromUserId} joined, initializing connection...`);
  createPeerConnection(fromUserId);
  await setUserInfo(fromUserId);
  swapVideoInfo(fromUserId);
}
// Remote ICE candidate handler: candidates are queued per user and drained
// in order with small delays, retrying until remoteDescription is set.
const candidateHandle = async (toUserId, candidate) => {
  if (!candidate) {
    return;
  }
  console.log(
    `info: user: ${toUserId} candidate received: ${JSON.stringify(candidate)}`
  );
  const user = userMap.value.get(toUserId);
  // candidateInfo is initialized in createPeerConnection, so it exists here.
  // Append the candidate to the queue.
  user.candidateInfo.push(candidate);
  // If the queue is already being drained, this candidate will be picked up.
  if (user.isProcessingCandidate) {
    return;
  }
  // Mark the queue as being drained.
  user.isProcessingCandidate = true;
  // Drain all queued candidates in order.
  const processCandidates = async () => {
    while (user.candidateInfo.length > 0) {
      const candidateToAdd = user.candidateInfo.shift();
      try {
        if (user.peerConnection && user.peerConnection.remoteDescription) {
          await user.peerConnection.addIceCandidate(candidateToAdd);
          console.log(
            `info: user: ${toUserId} candidate Added: ${JSON.stringify(
              candidateToAdd
            )}`
          );
        } else {
          // remoteDescription not set yet: put the candidate back at the head
          user.candidateInfo.unshift(candidateToAdd);
          // and retry after a short delay (flag stays set meanwhile).
          setTimeout(processCandidates, 50);
          return;
        }
      } catch (error) {
        console.error(
          `info: failed to add candidate for user ${toUserId}:`,
          error
        );
      }
      // Space out additions to avoid bursts.
      if (user.candidateInfo.length > 0) {
        await new Promise((resolve) => setTimeout(resolve, 20));
      }
    }
    // Done draining; allow a new drain to start.
    user.isProcessingCandidate = false;
  };
  // Start draining.
  processCandidates();
}; // (answer handling for the offer side follows below)
// Apply the remote answer to the matching peer connection (offer side).
// Fixes: guard against an unknown sender (userMap.get could be undefined),
// and log setRemoteDescription failures instead of leaving the promise
// rejection unhandled.
const answerHandle = (toUserId, answer) => {
  console.log(`info: received answer from user: ${toUserId}`);
  const user = userMap.value.get(toUserId);
  if (!user || !user.peerConnection) {
    console.error(`error: no peer connection for user: ${toUserId}`);
    return;
  }
  user.peerConnection
    .setRemoteDescription(new RTCSessionDescription(answer))
    .then(() => {
      console.log(`info: remote description set for user: ${toUserId}`);
    })
    .catch((error) => {
      console.error(
        `error: failed to set remote description for user ${toUserId}:`,
        error
      );
    });
};
// Offer handler (answer side): build a connection for the sender, load their
// profile, apply the remote offer, then create and return an answer.
const offerHandle = async (toUserId, offer) => {
  console.log(
    `info: received offer from user: ${toUserId}, creating peer connection...`
  );
  createPeerConnection(toUserId);
  await setUserInfo(toUserId);
  const pc = userMap.value.get(toUserId).peerConnection;
  await pc.setRemoteDescription(new RTCSessionDescription(offer));
  console.log(`info: remote description set for user: ${toUserId}`);
  const answer = await pc.createAnswer();
  await pc.setLocalDescription(answer);
  // Send the answer back to the offerer.
  console.log(`info: sending answer to user: ${toUserId}`);
  sendSdpMessage("answer", answer, toUserId);
};
// Offer side: create a local offer, set it locally, and send it to the user.
const swapVideoInfo = async (toUserId) => {
  console.log(`info: swapping video info with user: ${toUserId}`);
  console.log(`info: local description set for user: ${toUserId}`);
  const pc = userMap.value.get(toUserId).peerConnection;
  const offer = await pc.createOffer();
  await pc.setLocalDescription(offer);
  console.log(`info: sending offer to user: ${toUserId}`);
  sendSdpMessage("offer", offer, toUserId);
};
// Create the RTCPeerConnection for a user and wire up candidate/track/state
// listeners; also creates the user's entry in the member map.
const createPeerConnection = async (toUserId) => {
  console.log(`info: creating peer connection for user: ${toUserId}`);
  // (an editor may flag the config type name here; safe to ignore)
  let peerConnection = new RTCPeerConnection(RTCConfiguration);
  // Add our local audio/video tracks to the PeerConnection.
  localStream.getTracks().forEach((track) => {
    peerConnection.addTrack(track, localStream);
  });
  // Queue locally gathered ICE candidates and send them at intervals
  // (avoids flooding the message queue).
  const candidateQueue = [];
  let isProcessing = false;
  // Drain the local candidate queue, one candidate every 20ms.
  const processCandidateQueue = () => {
    if (isProcessing || candidateQueue.length === 0) return;
    isProcessing = true;
    const candidate = candidateQueue.shift();
    sendSdpMessage("candidate", candidate, toUserId);
    if (candidateQueue.length > 0) {
      setTimeout(() => {
        isProcessing = false;
        processCandidateQueue();
      }, 20);
    } else {
      isProcessing = false;
    }
  };
  // Push newly gathered candidates onto the queue.
  peerConnection.onicecandidate = (event) => {
    if (event.candidate) {
      candidateQueue.push(event.candidate);
      processCandidateQueue();
    }
  };
  // Receive the remote audio/video stream.
  peerConnection.ontrack = (event) => {
    nextTick(() => {
      const user = userMap.value.get(toUserId);
      if (user && event.streams[0]) {
        user.videoStream = event.streams[0];
        startAudioAnalysis(user);
      } else {
        console.log(`warnning: remote stream from user: ${toUserId} not set`);
      }
    });
  };
  // Watch the ICE connection state.
  peerConnection.oniceconnectionstatechange = () => {
    console.log(
      `info: ice connection state changed to: ${peerConnection.iceConnectionState}`
    );
    // When the peer disconnects, announce it and drop them from the map.
    if (peerConnection.iceConnectionState === "disconnected") {
      peerConnection.close();
      ElMessage.info(`用户${userMap.value.get(toUserId).nickname}离开了自习室`);
      userMap.value.delete(toUserId);
    }
  };
  // Create the member-map entry for this user (profile filled by setUserInfo).
  userMap.value.set(toUserId, {
    userId: toUserId,
    avatarUrl: "",
    nickname: "",
    status: "studying", // user status
    peerConnection: peerConnection,
    candidateInfo: [],
    isProcessingCandidate: false,
    videoStream: null,
    isStreaming: 0,
    videoElement: null,
    isMuted: true,
  });
};
// Fetch and fill profile info (avatar, nickname) for a room member, and
// reset their audio-analysis fields; fetch failures are logged only.
const setUserInfo = async (userId) => {
  const user = userMap.value.get(userId);
  try {
    const res = await getUserInfo(userId);
    user.avatarUrl = res.data.avatarUrl;
    user.nickname = res.data.nickname;
    // Audio analysis starts later, once a stream arrives.
    user.audioLevel = 0;
    user.audioContext = null;
    user.analyser = null;
    user.microphone = null;
  } catch (error) {
    console.error("Error fetching user info:", error);
  }
};
// Map a member status to an element-plus tag type (unknown -> "info").
const getStatusType = (status: string) => {
  switch (status) {
    case "studying":
      return "success";
    case "focused":
      return "warning";
    case "resting":
      return "info";
    case "offline":
      return "danger";
    default:
      return "info";
  }
};
// Map a member status to its display label (unknown -> "未知").
const getStatusText = (status: string) => {
  switch (status) {
    case "studying":
      return "学习中";
    case "focused":
      return "专注中";
    case "resting":
      return "休息中";
    case "offline":
      return "离线";
    default:
      return "未知";
  }
};
// Scroll a member's main video tile into view and flash a highlight ring
// (styled by .remote-video-container.focused).
const focusOnUser = (userId) => {
  const container = document.getElementById(`${userId}-container`);
  if (!container) return;
  container.scrollIntoView({
    behavior: "smooth",
    block: "center",
    inline: "center",
  });
  container.classList.add("focused");
  setTimeout(() => container.classList.remove("focused"), 2000);
};
// Background color for the mic indicator: translucent red when muted,
// otherwise green whose opacity scales with the current audio level.
const getAudioLevelColor = (user) => {
  if (user.isMuted) {
    return "rgba(255, 0, 0, 0.3)";
  }
  const level = user.audioLevel || 0;
  // Opacity runs from 0.2 (quiet) up to 1.0 (loud).
  const alpha = Math.min(0.2 + level * 0.8, 1.0);
  return `rgba(0, 255, 51, ${alpha})`;
};
// Start analysing a user's audio stream to drive the mic level indicator.
const startAudioAnalysis = (user) => {
  if (!user.videoStream || user.isMuted) return;
  // If an analyser already exists, tear it down first.
  stopAudioAnalysis(user);
  try {
    // Create the audio context and analyser.
    user.audioContext = new window.AudioContext();
    user.analyser = user.audioContext.createAnalyser();
    user.analyser.fftSize = 256;
    // Keep the animation-frame handle so the loop can be cancelled later.
    user.audioAnimationFrameId = null;
    // Grab the audio track.
    const audioTracks = user.videoStream.getAudioTracks();
    if (audioTracks.length > 0) {
      user.microphone = user.audioContext.createMediaStreamSource(
        new MediaStream([audioTracks[0]])
      );
      user.microphone.connect(user.analyser);
      // Per-frame analysis loop.
      const updateAudioLevel = () => {
        // Stop when the user left, muted, or the analyser was torn down.
        if (!user.analyser || user.isMuted || !userMap.value.has(user.userId)) {
          stopAudioAnalysis(user);
          return;
        }
        const dataArray = new Uint8Array(user.analyser.frequencyBinCount);
        user.analyser.getByteFrequencyData(dataArray);

        // Average the frequency bins to estimate loudness.
        let sum = 0;
        for (let i = 0; i < dataArray.length; i++) {
          sum += dataArray[i];
        }
        const average = sum / dataArray.length;

        // Normalize the level to the 0-1 range.
        user.audioLevel = average / 255;

        // Schedule the next frame.
        user.audioAnimationFrameId = requestAnimationFrame(updateAudioLevel);
      };
      updateAudioLevel();
    }
  } catch (error) {
    console.error("Error starting audio analysis:", error);
  }
};
// Tear down a user's audio-analysis pipeline and reset the level meter.
const stopAudioAnalysis = (user) => {
  if (user.audioAnimationFrameId) {
    cancelAnimationFrame(user.audioAnimationFrameId);
    user.audioAnimationFrameId = null;
  }
  // Disconnect and drop both WebAudio nodes, if present.
  for (const key of ["analyser", "microphone"]) {
    if (user[key]) {
      user[key].disconnect();
      user[key] = null;
    }
  }
  if (user.audioContext) {
    user.audioContext
      .close()
      .catch((err) => console.error("Error closing audio context:", err));
    user.audioContext = null;
  }
  user.audioLevel = 0; // meter back to silent
};
</script>
<style>
/* Room layout: full-size column holding the video grid and bottom controls. */
.main-video {
  position: relative;
  height: 100%;
  width: 100%;
  display: flex;
  flex-direction: column;
  align-items: center;
  justify-content: space-between;
  overflow: hidden;
  /* transition: max-height 0.3s ease; */
}
/* Zero-height anchor at the bottom; the member strip and controls overflow out of it. */
.append-video {
  display: flex;
  height: 0;
  flex-direction: column;
  align-items: center;
  justify-content: center;
  position: relative;
  bottom: 0;
  width: 100%;
  overflow: visible;
}
/* NOTE(review): the template renders id="video-placeholder" (an id, not a
   class), so this class selector may never match an element — confirm before
   relying on these rules. Fixed: removed a duplicate `height: 100%`
   declaration that appeared twice in the same rule. */
.video-placeholder {
  width: 100%;
  height: 100%; /* fill the parent container */
  grid-auto-rows: 50%; /* per-row height, i.e. video height */
  grid-gap: 16px; /* spacing between videos */
  overflow-x: auto; /* allow horizontal scrolling */
  overflow-y: hidden;
  padding: 16px;
  box-sizing: border-box;
}
.video-placeholder p {
  margin-top: 12px;
  opacity: 0.8;
}
/* Bottom control bar pinned above the page edge. */
.video-controls {
  display: flex;
  position: absolute;
  /* height: 4rem; */
  width: 100%;
  justify-content: space-around;
  align-items: center;
  bottom: 1rem;
  /* NOTE(review): `position` is not an animatable property, so this
     transition has no effect — likely intended for another property. */
  transition: position 0.3s ease;
  padding: 12px;
  box-sizing: border-box;
}
/* Soft dark halo behind the controls. */
.video-controls::before {
  content: "";
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  background: radial-gradient(circle at center, #00000066 60%, transparent 80%);
  z-index: 0;
  pointer-events: none; /* don't block click events */
  border-radius: 12px;
}
/* Horizontal member strip shown above the control bar. */
.participants-videos {
  display: flex;
  position: absolute;
  width: 100%;
  bottom: 4.5rem;
  background: rgba(0, 0, 0, 0.3);
  padding: 20px;
  padding-bottom: 40px;
  gap: 16px;
  overflow-x: auto;
  flex-wrap: nowrap;
}
.participant-video {
  position: relative;
  bottom: 0;
  flex-shrink: 0;
  text-align: center;
  cursor: pointer;
}
/* Small round status badges on each member tile. */
.video-status,
.audio-status {
  position: absolute;
  width: 24px;
  height: 24px;
  border-radius: 50%;
  display: flex;
  align-items: center;
  justify-content: center;
}

.video-status {
  top: 4px;
  left: 4px;
  background: rgba(0, 0, 0, 0.5);
}

.video-icon {
  font-size: 14px;
  color: white;
}
.video-frame {
  width: 80px;
  height: 80px;
  background: rgba(0, 0, 0, 0.3);
  border-radius: 8px;
  display: flex;
  align-items: center;
  justify-content: center;
  margin-bottom: 8px;
  position: relative;
}
.audio-status {
  bottom: 4px;
  right: 4px;
  background: rgba(255, 0, 0, 0.3);
}

.audio-status.audio-active {
  background: #00ff336f;
}
.video-status.video-active {
  background: #00ff336f;
}

.audio-icon {
  font-size: 14px;
  color: white;
}

.participant-info {
  color: white;
}
.participant-name {
  display: block;
  font-size: 12px;
  margin-bottom: 4px;
}

/* Fade transition for the member strip */
.list-fade-enter-active,
.list-fade-leave-active {
  transition: opacity 0.5s ease;
}

.list-fade-enter-from,
.list-fade-leave-to {
  opacity: 0;
}
@media (max-width: 768px) {
  .center-panel {
    order: -1;
    border-radius: 0;
  }
}

.single-row {
  width: 100%;
  height: 100%;
  display: flex;
  justify-content: start;
  align-items: center;
  overflow-x: auto;
  overflow-y: hidden;
  white-space: nowrap;
  padding: 1rem;
  box-sizing: border-box;
}

.single-row .remote-video-container {
  width: 100%;
  height: 100%;
  max-width: 100%;
  object-fit: contain;
  flex: 0 0 auto;
  margin-right: 0;
}
/* .single-row .remote-video-container:last-child {
  margin-right: 0;
} */
.double-row {
  width: 100%;
  height: 100%;
  display: flex;
  flex-direction: column;
  flex-wrap: wrap; /* 纵向排列，自动换行 */
  overflow-x: auto;
  overflow-y: hidden;
  gap: 1rem; /* 视频之间的间距 */
  padding: 1rem;
  box-sizing: border-box;
  align-content: flex-start;
}

.double-row .remote-video-container {
  flex: 0 0 calc(50% - 1rem); /* 两个视频，考虑 gap 影响 */
  height: 50%;
  min-width: 50%; /* 设置最小宽度，确保视频不会过小 */
  aspect-ratio: 16 / 9;
  object-fit: contain;
  border-radius: 8px;
}

.triple-row {
  width: 100%;
  height: 100%; /* 根据需要调整高度 */
  display: flex;
  flex-direction: column;
  flex-wrap: wrap; /* 允许纵向排列并自动换行 */
  overflow-x: auto; /* 允许水平滚动 */
  overflow-y: hidden; /* 隐藏垂直滚动条 */
  gap: 1rem; /* 视频之间的间距 */
  padding: 1rem;
  box-sizing: border-box;
  align-content: flex-start; /* 确保内容在容器中靠上对齐 */
}

.triple-row .remote-video-container {
  flex: 0 0 calc(33.33% - 1rem); /* 每个视频占据三分之一宽度并考虑间距 */
  height: 33.3%; /* 设置固定高度，确保三行显示 */
  min-width: 33.3%; /* 设置最小宽度，确保视频不会过小 */
  aspect-ratio: 16 / 9;
  object-fit: contain; /* 保持视频比例并覆盖整个区域 */
  border-radius: 8px;
}
.remote-video {
  height: 100%;
  width: 100%;
  display: block;
  object-fit: contain;
  border-radius: 8px;
  background-color: #00000040;
}
/* Avatar placeholder shown when a member has no active video stream. */
.no-stream {
  width: 100%;
  height: 100%;
  display: flex;
  flex-direction: column;
  justify-content: center;
  align-items: center;
  background-color: #00000040;
  color: white;
}

.no-stream p {
  margin-top: 10px;
  font-size: 16px;
}
.remote-video-container {
  position: relative;
}

/* Status overlay covering each main video tile. */
.video-overlay {
  position: absolute;
  top: 0;
  left: 0;
  width: 100%;
  height: 100%;
  pointer-events: none; /* don't block interaction with the video */
}

.video-status-overlay,
.audio-status-overlay {
  position: absolute;
  width: 30px;
  height: 30px;
  border-radius: 50%;
  display: flex;
  align-items: center;
  justify-content: center;
  background: rgba(0, 0, 0, 0.5);
}

.video-status-overlay {
  top: 10px;
  right: 10px;
  background: rgba(0, 0, 0, 0.5);
}

.audio-status-overlay {
  bottom: 10px;
  right: 10px;
  background: rgba(255, 0, 0, 0.3);
}

.audio-status-overlay.audio-active {
  background: #00ff336f;
}
.video-status-overlay.video-active {
  background: #00ff336f;
}

.overlay-icon {
  font-size: 18px;
  color: white;
}

/* Temporary highlight applied by focusOnUser(). */
.remote-video-container.focused {
  outline: 3px solid #409eff;
  outline-offset: 2px;
  border-radius: 8px;
  transition: outline 0.3s ease;
}
/* Occupancy badge in the top-left corner of the video grid. */
.room-participants-display {
  position: absolute;
  top: 1.5rem;
  left: 1.5rem;
  z-index: 10;
  background: rgba(0, 0, 0, 0.5);
  border-radius: 16px;
  padding: 4px 8px;
  display: flex;
  align-items: center;
  color: white;
  font-size: 14px;
}

.participants-icon {
  margin-right: 4px;
  font-size: 16px;
}
</style>
