<template>
  <div>
    <div>
      <!-- Entry button: initializes the RTC engine, joins the room, starts the AI bot -->
      <div class="btn" @click="initRtc">初始化</div>
      <div>
        <!-- Rolling transcript: one line per history entry -->
        <div v-for="(item, index) in msgHistory" :key="index">
          {{ item.value }}
        </div>
      </div>
    </div>
  </div>
</template>

<script lang="ts" setup>
import { ref } from "vue";
import VERTC, {
  onUserJoinedEvent,
  onUserLeaveEvent,
  MediaType,
  StreamRemoveReason,
} from "@volcengine/rtc";
import RtcClient from "@/utils/rtcClient";
import config from "@/config/index.ts";
// NOTE(review): a hard-coded appId/token pair is committed here, even if only
// as a comment. Credentials should never live in source control — remove this
// block and rotate the token if it was ever valid.
// const config = {
//   appId: "677f2533fcf90101a468495e",
//   token:
//     "001677f2533fcf90101a468495eUAAcyqAFRs2EZ8YHjmcNAFJ0Y1Rlc3RSb29tMDENAFJ0Y1Rlc3RVc2VyMDMGAAAAxgeOZwEAxgeOZwIAxgeOZwMAxgeOZwQAxgeOZwUAxgeOZyAACL0HA5Qs8q8GLOHgWVHlBrHwpJ0K2yuI0l/jGc93hJI=",
//   userId: "RtcTestUser03",
//   roomId: "RtcTestRoom01",
// };
/**
 * Create the RTC engine, register all room event listeners, join the room,
 * start local audio capture, and launch the AIGC audio bot.
 */
async function initRtc() {
  const { AppId: appId, Token: token, UserId: userId, RoomId: roomId } = config;
  RtcClient.createEngine({
    appId,
    token,
    userId,
    roomId,
  });
  RtcClient.addEventListeners({
    // User joined the room.
    [VERTC.events.onUserJoined]: (e: onUserJoinedEvent) => {
      console.log("用户进房", e);
    },
    // User left the room.
    [VERTC.events.onUserLeave]: (e: onUserLeaveEvent) => {
      console.log("用户离房", e);
    },
    // User published an audio/video stream.
    [VERTC.events.onUserPublishStream]: (e: { userId: string; mediaType: MediaType }) => {
      console.log("用户发布流", e);
    },
    // User unpublished an audio/video stream.
    [VERTC.events.onUserUnpublishStream]: (e: {
      userId: string;
      mediaType: MediaType;
      reason: StreamRemoveReason;
    }) => {
      console.log("用户取消发布流", e);
    },
    // User started audio capture.
    [VERTC.events.onUserStartAudioCapture]: (e: { userId: string }) => {
      console.log("用户开启音频采集", e);
    },
    // User stopped audio capture.
    [VERTC.events.onUserStopAudioCapture]: (e: { userId: string }) => {
      console.log("用户关闭音频采集", e);
    },
    // Binary room message: decode the UTF-8 payload, strip any non-JSON
    // prefix, then feed the subtitle data into the transcript state.
    [VERTC.events.onRoomBinaryMessageReceived]: (event: {
      userId: string;
      message: ArrayBuffer;
    }) => {
      const str = new TextDecoder("utf-8").decode(event.message);
      // The payload may carry a binary header before the JSON body.
      const start = str.indexOf("{");
      if (start < 0) return; // no JSON body in this message
      let context: any;
      try {
        context = JSON.parse(str.substring(start)) || {};
      } catch {
        // Malformed payload — ignore it rather than crash the event handler.
        return;
      }
      const data = context.data?.[0];
      if (data) {
        const { text: msg, definite, userId: user, paragraph } = data;
        if ((window as any)._debug_mode) {
          // BUG FIX: setHistoryMsg reads obj.text, so the key must be `text`
          // (the old `{ msg, ... }` object produced undefined history values).
          setHistoryMsg({ text: msg, user, paragraph, definite });
        } else {
          const isAudioEnable = RtcClient.getAudioBotEnabled();
          if (isAudioEnable) {
            setHistoryMsg({ text: msg, user, paragraph, definite });
          }
        }
        setCurrentMsg({ msg, definite, user, paragraph });
      }
    },
  });

  await addRoom();
  // Await so failures to start the bot surface to the caller.
  await handleAIGCModeStart(roomId, userId);
}

/**
 * Join the RTC room and start capturing audio from the first available
 * input device. Engine configuration was already applied in createEngine.
 */
async function addRoom() {
  // Only token and userId are needed here (appId/roomId were unused).
  const { Token: token, UserId: userId } = config;

  await RtcClient.joinRoom({ token, username: userId });

  const audioInputs = await RtcClient.getDevices();
  const defaultUsableDevice = audioInputs?.[0];
  if (!defaultUsableDevice) {
    // Surface the problem instead of silently capturing with `undefined`;
    // still attempt capture so the SDK can fall back to its own default.
    console.warn("no usable audio input device found");
  }

  await RtcClient.startAudioCapture(defaultUsableDevice);
}

// Whether the AIGC audio bot has already been started for this session.
const isAIGCEnable = ref<boolean>(false);

/**
 * Start (or restart) the AIGC audio bot for the given room/user.
 * If the bot is already running it is stopped first so startAudioBot
 * always begins from a clean state.
 */
const handleAIGCModeStart = async (roomId: string, userId: string) => {
  if (isAIGCEnable.value) {
    await RtcClient.stopAudioBot(roomId, userId);
  }
  // Both branches started the bot; hoisted out of the if/else.
  await RtcClient.startAudioBot(roomId, userId, config);
  isAIGCEnable.value = true;
};

// Who is currently speaking (drives interruption detection in setHistoryMsg).
// Use the primitive `boolean`, not the boxed `Boolean` wrapper type.
const isAITalking = ref<boolean>(false);
const isUserTalking = ref<boolean>(false);
// Local participant identity, taken from static config.
const localUser = ref({
  userId: config.UserId,
  userName: config.UserName,
});
// Transcript entries: { user, value, time, isInterrupted, paragraph }.
const msgHistory = ref<any[]>([]);

/**
 * Fold one subtitle update into the transcript.
 * - `paragraph: true` marks a finished sentence and clears talking flags.
 * - Partial (non-paragraph) messages of the local user are dropped.
 * - When the user starts talking over an unfinished AI message, that AI
 *   message is marked as interrupted.
 * obj: { text, user, paragraph, definite } — definite is currently unused here.
 */
function setHistoryMsg(obj) {
  const paragraph = obj.paragraph;
  const aiTalking = obj.user === config.BotName;
  const userTalking = obj.user === localUser.value.userId;
  if (paragraph) {
    // A completed sentence ends the current talking state for both sides.
    if (isAITalking.value) {
      isAITalking.value = false;
    }
    if (isUserTalking.value) {
      isUserTalking.value = false;
    }
  } else {
    if (isAITalking.value !== aiTalking) {
      isAITalking.value = aiTalking;
    }
    if (isUserTalking.value !== userTalking) {
      isUserTalking.value = userTalking;
    }
    // Partial transcripts of the local user are not recorded.
    if (obj.user === localUser.value.userId) {
      return;
    }
  }
  /** If the current speaker is a user, and the previous record is AI, and it is not a sentence, then interrupt */
  if (userTalking) {
    const lastMsg: any = msgHistory.value[msgHistory.value.length - 1];
    // BUG FIX: guard the empty-history case — lastMsg is undefined on the
    // very first message and `lastMsg.user` crashed the handler.
    if (lastMsg && !lastMsg.paragraph && lastMsg.user !== localUser.value.userId) {
      // Mutating in place is enough; re-assigning the same reference back
      // into the array (as before) was a no-op.
      lastMsg.isInterrupted = true;
    }
  }
  addMsgWithoutDuplicate(msgHistory.value, {
    user: obj.user,
    value: obj.text,
    time: new Date().toLocaleString(),
    isInterrupted: false,
    paragraph,
  });
}

// Stub: intended to track the in-flight (current) utterance per speaker.
// The state write is commented out, so this is currently a no-op apart from
// throwing if `obj` is null/undefined (the destructure).
function setCurrentMsg(obj) {
  const { user, ...info } = obj;
  // state.currentConversation[user || state.localUser.userId] = info;
}

/**
 * Append `added` to `arr`, collapsing it with the last entry when that entry
 * is the same user's message and one value is a prefix of the other (i.e. a
 * streaming-ASR update of the same utterance). When collapsing, the replaced
 * entry's `isInterrupted` flag is carried over onto `added`.
 */
function addMsgWithoutDuplicate(arr, added) {
  const last = arr[arr.length - 1];
  const supersedesLast =
    last !== undefined &&
    last.user === added.user &&
    (added.value.startsWith(last.value) || last.value.startsWith(added.value));
  if (supersedesLast) {
    arr.pop();
    added.isInterrupted = last.isInterrupted;
  }
  arr.push(added);
}
</script>
<style lang="scss" scoped></style>
