"use client";
import React, { useEffect, useRef, useState } from "react";
import { Human } from "@/app/api/index";
import RtcStore from "@/app/store/rtc";
import VideoStore from "@/app/store/video";
import CurrentInput from "@/app/components/CurrentInput/page";
import useStore from "@/app/store/is_config";
import RollCaptions from "@/app/components/RollCaptions/page";
import useCaptionsStore from "@/app/store/captions";
import { getJson, get_token, people_stop } from "@/app/api/index";
import { message } from "antd";
import { generateUUID, LogMessage, writeLog, setupAutoStart } from "./index";
import { LLM, currentLLMFetchController, interruptLLM } from "./switch";
import TypingIndicator from "@/app/components/TypingIndicator/page";
import "./index.scss";
import {
  WebRTCManager,
  StreamConfigurations,
  StreamConfig,
  GetNextSwitchInfo,
} from "./pc";
import useVideoStreamsBinding from "./hooks/useVideoStreamsBinding";
import useActiveStreamRender from "./hooks/useActiveStreamRender";
import useSessionIdPostMessage from "./hooks/useSessionIdPostMessage";

/** Props for the TheDigitalHuman embedded-player component. */
interface TheDigitalHumanProps {
  /** CSS width of the container (default "550px"). */
  width?: string;
  /** CSS height of the container (default "100%"). */
  height?: string;
  /** Which horizontal edge the player is anchored to (default "left"). */
  position?: "left" | "right";
}
const TheDigitalHuman: React.FC<TheDigitalHumanProps> = ({
  width = "550px",
  height = "100%",
  position = "left",
}) => {
  // NOTE(review): `file` appears unused in this file — TODO confirm and remove.
  const [file] = useState("config.json");
  // Whether text chat is enabled / captions are shown (from config store).
  const { is_txt, is_captions } = useStore();
  // Tracks whether the LLM is currently streaming a response.
  const [isLLMResponding, setIsLLMResponding] = useState(false);
  // Caption (subtitle) state management.
  const {
    startUserMessage,
    startAIResponse,
    updateAIResponse,
    completeConversation,
    limitConversations,
  } = useCaptionsStore();
  // Timestamp of the last user activity (drives the inactivity timeout).
  const lastActivityRef = useRef<number>(Date.now());
  // Handle of the once-a-minute inactivity-check interval.
  const inactivityTimerRef = useRef<NodeJS.Timeout | null>(null);
  // Marks that the disconnect was caused by the inactivity timeout
  // (suppresses the WebSocket auto-reconnect logic).
  const isTimeoutDisconnect = useRef<boolean>(false);
  // Cleanup function returned by setupAutoStart (daily WS auto-start).
  const autoStartCleanupRef = useRef<(() => void) | null>(null);

  // Stream and session management: streamId -> MediaStream.
  const [streams, setStreams] = useState<Map<string, MediaStream>>(new Map());

  const [activeStreamId, _setActiveStreamId] = useState<string>("default");
  const activeStreamIdRef = useRef<string>("default");

  const initialConfig =
    StreamConfigurations.find((c) => c.id === "default") || null;

  // Configuration of the currently active video stream. State + ref are kept
  // in sync so async callbacks can read the latest value without stale closures.
  const [activeStreamConfig, _setActiveStreamConfig] =
    useState<StreamConfig | null>(initialConfig);
  const activeStreamConfigRef = useRef<StreamConfig | null>(initialConfig);
  const setActiveStreamConfig = (config: StreamConfig | null) => {
    _setActiveStreamConfig(config);
    activeStreamConfigRef.current = config;
  };

  // Switch the active video stream and resolve/store its configuration.
  const setActiveStreamId = (streamId: string) => {
    _setActiveStreamId(streamId);
    activeStreamIdRef.current = streamId;
    const config = StreamConfigurations.find((c) => c.id === streamId) || null;
    setActiveStreamConfig(config);
  };

  const [sessionIds, _setSessionIds] = useState<Map<string, number>>(new Map());
  const sessionIdsRef = useRef<Map<string, number>>(new Map());
  // setState wrapper that mirrors the new Map into a ref for synchronous reads.
  const setSessionIds = (
    updater: React.SetStateAction<Map<string, number>>
  ) => {
    _setSessionIds((prev) => {
      const newValue = typeof updater === "function" ? updater(prev) : updater;
      sessionIdsRef.current = newValue;
      return newValue;
    });
  };

  // Elapsed seconds since the default stream started playing, and its interval.
  const TIMEREF = useRef<number>(0);
  const TIMERREF = useRef<NodeJS.Timeout | null>(null);
  // Guards against switching to STOP more than once per utterance.
  const SWITCH_TO = useRef<boolean>(false);
  // Timers used to schedule stream switches around LLM speech playback.
  const diff_timer = useRef<NodeJS.Timeout | null>(null);
  const diff_timer_start = useRef<NodeJS.Timeout | null>(null);
  const diff_timer_end = useRef<NodeJS.Timeout | null>(null);
  /**
   * Start the 1-second tick that tracks how long the digital human has been
   * playing (TIMEREF accumulates elapsed seconds). Clears any previously
   * running interval first so repeated calls cannot leak duplicate timers.
   */
  const StartTimer = () => {
    if (TIMERREF.current) clearInterval(TIMERREF.current); // avoid stacking intervals
    TIMERREF.current = setInterval(() => {
      TIMEREF.current++;
    }, 1000);
  };

  /**
   * Cancel every pending stream-switch timer and null out its ref.
   * diff_timer / diff_timer_end hold timeouts; diff_timer_start holds an interval.
   */
  const clear_timer = () => {
    for (const ref of [diff_timer, diff_timer_end]) {
      if (!ref.current) continue;
      clearTimeout(ref.current);
      ref.current = null;
    }
    if (diff_timer_start.current) {
      clearInterval(diff_timer_start.current);
      diff_timer_start.current = null;
    }
  };

  /**
   * Abort the in-flight LLM request and finalize the current conversation,
   * marking the partially streamed AI answer as interrupted by the user.
   */
  const ClearLLM = async () => {
    // Default to "" instead of the previous non-null assertion: when there is
    // no current conversation, `!` yielded undefined and the caption became
    // the literal string "undefined  (用户打断)  ".
    const aiContent =
      useCaptionsStore.getState().currentConversation?.aiMessage?.content ?? "";
    interruptLLM(); // abort the streaming request
    setIsLLMResponding(false); // clear the responding flag
    updateAIResponse(aiContent + "  (用户打断)  ", true); // append "interrupted" marker
    completeConversation(); // close out the conversation entry
  };

  /**
   * Switch to the next digital-human stream.
   * @param type message type, "STOP" | "START"
   * @param config configuration of the currently active stream
   */
  const SwitchPeople = async (type: any, config: any) => {
    // GetNextSwitchInfo picks the next stream and a wait before switching,
    // based on the elapsed play time. (presumably `diff` is in seconds, given
    // the * 1000 below — TODO confirm against GetNextSwitchInfo)
    const { item, diff }: any = GetNextSwitchInfo(
      TIMEREF.current,
      type,
      config.id
    );
    console.log("******************************************************");
    console.log(type, config.id, item);
    console.log("******************************************************");
    if (!item) return;
    // NOTE(review): if clear_timer() cancels diff_timer while this await is
    // pending, the promise never settles and the switch silently never happens.
    await new Promise(
      (resolve) => (diff_timer.current = setTimeout(resolve, diff * 1000))
    );
    requestAnimationFrame(() => setActiveStreamId(item.id));
  };

  /** Log the user's question and open a new captions conversation for it. */
  const StartChat = (message: string) => {
    const logStyle = "font-size:20px ; color:red";
    console.log(`%c 用户问题：${message}`, logStyle);
    startUserMessage(message);
    startAIResponse();
  };
  /**
   * Send a message / drive the conversation flow: interrupt whatever is
   * playing, optionally route the message through the LLM, and schedule the
   * stream switches around the spoken answer.
   * @param type message type, "STOP" | "START"
   * @param message message content
   * @param isllm whether to route the message through the LLM
   */
  const SendIsLLM = async (type: any, message: string = "", isllm = false) => {
    console.log("=====SendIsLLM=====");
    await ClearLLM();
    const config = activeStreamConfigRef.current!;
    await InterruptStream();
    clear_timer();
    // Seconds elapsed since the LLM started answering (driven by diff_timer_start).
    let time: number = 0;
    if (message != "") StartChat(message);
    if (type == "START" && isllm) {
      setIsLLMResponding(true); // LLM starts responding
      LLM(
        message,
        async () => {
          // First-token callback: start counting playback time, switch streams.
          diff_timer_start.current = setInterval(() => {
            time += 1;
          }, 1000);
          await SwitchPeople(type, config);
        },
        (content: string, isComplete: boolean) => {
          updateAIResponse(content, isComplete); // caption update callback
          if (isComplete) setIsLLMResponding(false); // LLM finished responding
        }
      ).then((data) => {
        if (!data.status) return;
        console.log("还差：", data.duration - time);
        // Wait out the remaining speech duration (+4s padding), then switch
        // back to the idle/STOP stream.
        diff_timer_end.current = setTimeout(async () => {
          await SwitchPeople("STOP", activeStreamConfigRef.current);
          clear_timer();
        }, (data.duration - time) * 1000 + 4000);
        completeConversation();
        limitConversations(5);
      });
    } else await SwitchPeople(type, config);
  };

  /**
   * Interrupt playback on every stream that has an active session.
   * Also refreshes the inactivity clock.
   * @returns the responses from each interrupted stream
   */
  const InterruptStream = async () => {
    lastActivityRef.current = Date.now(); // refresh last-activity timestamp
    isTimeoutDisconnect.current = false; // reset the timeout-disconnect flag
    const interruptRequests = StreamConfigurations.filter(
      (item) => item.sessionid
    ).map((item) =>
      Human(
        { text: "", type: "echo", interrupt: true, sessionid: item.sessionid },
        item.url
      )
    );
    return await Promise.all(interruptRequests);
  };

  // Tear down every connection after the inactivity timeout fires.
  const disconnectAll = () => {
    isTimeoutDisconnect.current = true; // mark as timeout disconnect (suppresses WS reconnect)
    if (wsRef.current) {
      // Close the WebSocket connection
      wsRef.current.close();
      wsRef.current = null;
    }
    rtcManagerRef.current?.close(); // close the RTCPeerConnection(s)
    // NOTE(review): this function is reached via an interval installed in the
    // mount-time effect, so `audioContext` is captured from the first render
    // and is likely always null here — the AudioContext created later may
    // never actually be closed. Consider storing it in a ref.
    if (audioContext) audioContext.close(); // close the audio context
    if (abortControllerRef.current) abortControllerRef.current.abort(); // abort in-flight requests
    people_stop();
    setpeoplestart(false);
    message.warning("由于长时间无活动，数字人已断开连接");
  };

  /**
   * Runs once a minute; disconnects everything once the user has been
   * inactive for 8 hours.
   */
  const checkInactivity = () => {
    const now = Date.now();
    // Renamed from the misleading `fiveHours` — the value is 8 hours in ms.
    const inactivityLimitMs = 8 * 60 * 60 * 1000;
    if (now - lastActivityRef.current >= inactivityLimitMs) {
      disconnectAll();
    }
  };

  // WebRTC connection manager.
  const rtcManagerRef = useRef<WebRTCManager | null>(null);
  // Hidden <video> element per stream id.
  const videoRefs = useRef(new Map<string, HTMLVideoElement>());
  // Digital-human session id, shared via the RTC store.
  const { setsessionid } = RtcStore();
  const { peoplestart, setpeoplestart } = VideoStore();
  // AudioContext used to capture the user's microphone.
  const [audioContext, setAudioContext] = useState<AudioContext | null>(null);
  // AbortController for speech-to-text related requests.
  const abortControllerRef = useRef<AbortController | null>(null);
  // Label names and wake words loaded from config in init().
  const label_data = useRef<any>([]);
  const wakeup = useRef<any>([]);
  // ASR WebSocket connection and reconnect bookkeeping.
  const wsRef = useRef<WebSocket | null>(null);
  const ws_token = useRef<string>("");
  const reconnectAttempts = useRef<number>(0);
  const maxReconnectAttempts = 5;
  const baseReconnectDelay = 1000; // initial reconnect delay: 1 second

  /**
   * Open the microphone, capture 16kHz audio, convert each frame to 16-bit
   * PCM and stream it over the ASR WebSocket.
   * NOTE: ScriptProcessorNode is deprecated; migrating to AudioWorklet is a
   * larger follow-up.
   */
  const startVoiceDetection = async () => {
    try {
      const context = new AudioContext({ sampleRate: 16000 });
      setAudioContext(context);
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: true,
          noiseSuppression: true,
          autoGainControl: true,
        },
      });

      const source = context.createMediaStreamSource(stream);
      const scriptProcessor = context.createScriptProcessor(2048, 1, 1);
      scriptProcessor.onaudioprocess = function (event) {
        // Convert float32 samples in [-1, 1] to 16-bit signed PCM.
        const inputData = event.inputBuffer.getChannelData(0);
        const inputData16 = new Int16Array(inputData.length);
        for (let i = 0; i < inputData.length; ++i) {
          inputData16[i] = Math.max(-1, Math.min(1, inputData[i])) * 0x7fff;
        }
        // Guard on readyState: send() on a CONNECTING socket throws, and the
        // socket may be mid-reconnect while audio frames keep arriving.
        if (wsRef.current && wsRef.current.readyState === WebSocket.OPEN) {
          wsRef.current.send(inputData16.buffer);
        }
      };
      source.connect(scriptProcessor);
      scriptProcessor.connect(context.destination);
      // The analyser is configured but its data is not read anywhere in this
      // file; kept as-is for parity with the original behavior.
      const analyzer = context.createAnalyser();
      analyzer.fftSize = 512; // smaller FFT for faster response
      analyzer.minDecibels = -90;
      analyzer.maxDecibels = -10;
      analyzer.smoothingTimeConstant = 0.2; // lower smoothing for higher sensitivity
      source.connect(analyzer);
    } catch (error) {
      console.error("startVoiceDetection failed:", error); // keep the cause for debugging
      message.error("请确保已允许浏览器访问麦克风");
    }
  };

  const handleSentenceEnd = async (data: any) => {
    const AsrTxt = data.payload.result;
    if (!AsrTxt) return;
    LogMessage("收到消息");
    console.log(`%c ${AsrTxt}`, "font-size:20px ; color:red");
    const isLabelled = label_data.current.some((item: any) =>
      AsrTxt.includes(item)
    );
    const trigger = wakeup.current.find((t: string) => AsrTxt.includes(t));
    const message = trigger ? AsrTxt.split(trigger)[1]?.trim() : null;
    if (message) {
      if (!isLabelled) {
        await SendIsLLM("START", AsrTxt, true);
      } else {
        StartChat(AsrTxt);
      }
      window.parent.postMessage({ type: "chat", value: AsrTxt }, "*");
    }
    setTimeout(() => {
      limitConversations(5); // 限制保留最近5组对话
    }, 500);
    writeLog("chat", `识别结果：${AsrTxt}`);
  };

  /**
   * Open the Aliyun NLS speech-transcription WebSocket and wire up its
   * lifecycle: send StartTranscription on open, route intermediate/final
   * results, and reconnect with exponential backoff on close.
   */
  const WebSocketInit = () => {
    // Do not reconnect if the disconnect was caused by the inactivity timeout.
    if (isTimeoutDisconnect.current) return;
    if (reconnectAttempts.current >= maxReconnectAttempts) {
      message.error("WebSocket连接异常");
      return;
    }
    const socketUrl = `wss://nls-gateway.cn-shanghai.aliyuncs.com/ws/v1?token=${ws_token.current}`;
    wsRef.current = new WebSocket(socketUrl);
    wsRef.current.onopen = () => {
      LogMessage("WebSocket连接已打开");
      startVoiceDetection();
      reconnectAttempts.current = 0;
      // NOTE(review): the appkey is hard-coded; consider moving it to config.
      var startTranscriptionMessage = {
        header: {
          appkey: "a6Kh25GRATHQmCKB",
          namespace: "SpeechTranscriber",
          name: "StartTranscription",
          task_id: generateUUID(),
          message_id: generateUUID(),
        },
        payload: {
          format: "pcm",
          sample_rate: 16000,
          enable_intermediate_result: true,
          enable_punctuation_prediction: true,
          enable_inverse_text_normalization: true,
        },
      };
      wsRef.current?.send(JSON.stringify(startTranscriptionMessage));
    };
    wsRef.current.onmessage = (event) => {
      const data = JSON.parse(event.data);
      if (data.header.name === "TranscriptionResultChanged") {
        console.log(data.payload.result);
        const trigger = wakeup.current.find((t: string) =>
          data.payload.result.includes(t)
        );
        // Only switch to STOP if we are not already in the STOP state and
        // have not switched yet during this utterance.
        if (
          trigger &&
          !SWITCH_TO.current &&
          activeStreamConfigRef.current?.type !== "STOP"
        ) {
          console.log("===========检测到唤醒词切换静默===========");
          SendIsLLM("STOP");
          SWITCH_TO.current = true;
        }
      }
      if (data.header.name === "SentenceEnd") {
        SWITCH_TO.current = false;
        handleSentenceEnd(data).catch((error) =>
          console.error("处理消息时出错:", error)
        );
      }
    };
    wsRef.current.onerror = (event) => {
      console.error("WebSocket错误:", event);
    };
    wsRef.current.onclose = () => {
      // Exponential-backoff delay.
      const delay = Math.min(
        baseReconnectDelay * Math.pow(2, reconnectAttempts.current),
        30000
      ); // capped at 30 seconds
      reconnectAttempts.current++;
      console.log(
        `尝试第 ${reconnectAttempts.current} 次重连，延迟 ${delay}ms`
      );
      // Reconnect after the backoff delay.
      setTimeout(async () => {
        // Re-fetch the NLS token first.
        try {
          const data: any = await get_token();
          ws_token.current = data.token;
          WebSocketInit();
        } catch (error) {
          console.error("获取token失败:", error);
        }
      }, delay);
    };
  };

  /**
   * Fetch the app config (labels + wake words) and the NLS token in parallel,
   * storing the results in refs for later use.
   */
  const init = async () => {
    try {
      const [configRes, tokenData] = (await Promise.all([
        getJson(),
        get_token(),
      ])) as [any, any];
      if (configRes.code == 200) {
        label_data.current = [
          ...configRes.data.Config_Label.map((item: any) => item.name),
          "返回",
        ];
        wakeup.current = configRes.data.Config_Trigger;
      }
      ws_token.current = tokenData.token;
    } catch (error) {
      console.error("初始化失败:", error);
    }
  };

  useEffect(() => {
    (async () => {
      await init();
      WebSocketInit(); // make sure the WebSocket is initialized on mount
      const manager = new WebRTCManager({
        onStreamReady: (streamId, stream) => {
          setStreams((prevStreams) =>
            new Map(prevStreams).set(streamId, stream)
          );
        },
        onConnectionStateChange: (streamId, state) => {
          console.log(`流 ${streamId} 的连接状态: ${state}`);
          if (
            state === "disconnected" ||
            state === "failed" ||
            state === "closed"
          ) {
            // Only report an error / flip the flag when the MAIN connection drops.
            if (streamId === "default") {
              writeLog("error", "数字人主连接已断开");
              setpeoplestart(false);
            }
          }
        },
        onSessionId: (streamId, id) => {
          setSessionIds((prev) => new Map(prev).set(streamId, id));
        },
        onError: (errorMessage, advice) => {
          setpeoplestart(false);
          message.error(advice || errorMessage);
          writeLog("error", advice || errorMessage);
        },
      });
      rtcManagerRef.current = manager;
      await manager.start();
      // Create a fresh AbortController for this mount.
      abortControllerRef.current = new AbortController();
      // Check for inactivity once a minute.
      inactivityTimerRef.current = setInterval(checkInactivity, 60000);
      // Auto-start the WebSocket every day at 9:00.
      autoStartCleanupRef.current = setupAutoStart({
        onAutoStart: async () => {
          // If a WebSocket exists but is not OPEN, close it first.
          if (wsRef.current) {
            wsRef.current.close();
            wsRef.current = null;
          }
          const tokenData: any = await get_token();
          ws_token.current = tokenData.token;
          WebSocketInit();
          message.success("WebSocket已自动启动");
        },
        onError: (error: any) => {
          message.error("自动启动WebSocket失败");
        },
        isConnected: () => {
          return !!(
            wsRef.current && wsRef.current.readyState === WebSocket.OPEN
          );
        },
      });
    })();

    // Named handler so it can be unregistered on unmount — the original
    // anonymous listener was never removed (listener leak across remounts).
    const handleWindowMessage = (event: any) => {
      if (event.data.type === "DigitWeb") {
        if (event.data.value.isStreaming) {
          updateAIResponse(event.data.value.content, false);
        } else {
          updateAIResponse(event.data.value.content, true);
          console.log("收到消息：", event);
          completeConversation();
          currentLLMFetchController?.abort();
          if (activeStreamConfigRef.current?.type == "START") {
            SendIsLLM("STOP");
          }
        }
      }
      if (event.data.type === "UserStart") {
        startUserMessage(event.data.value);
        startAIResponse();
      }
      if (event.data.type === "FirstSwitch") {
        console.log("==========web端首次切换========");
        SwitchPeople("START", activeStreamConfigRef.current);
      }
    };
    window.addEventListener("message", handleWindowMessage);

    return () => {
      // Unregister the parent-window message listener (fixes the leak).
      window.removeEventListener("message", handleWindowMessage);
      rtcManagerRef.current?.close();
      // NOTE(review): `audioContext` is captured at mount time and is null in
      // this cleanup; the AudioContext created later is not closed here.
      if (audioContext) audioContext.close();
      // Clear the inactivity-check interval.
      if (inactivityTimerRef.current) clearInterval(inactivityTimerRef.current);
      // Cancel the daily auto-start scheduler.
      if (autoStartCleanupRef.current) autoStartCleanupRef.current();
      // Clear all stream-switch timers.
      clear_timer();
      // Abort in-flight requests.
      if (abortControllerRef.current) abortControllerRef.current.abort();
    };
  }, []);

  const canvasRef = useRef<HTMLCanvasElement>(null);

  // Bind media streams to hidden <video> tags and handle the first playback
  // of the default stream.
  useVideoStreamsBinding({
    streams,
    videoRefs,
    onDefaultStreamPlaying: () => {
      setpeoplestart(true);
      writeLog("start", "数字人已开启");
      StartTimer();
    },
  });

  // Render the currently active stream onto the canvas.
  useActiveStreamRender({
    activeStreamId,
    streams,
    videoRefs,
    canvasRef,
  });

  // When the session ID changes, store it and notify the parent window.
  useSessionIdPostMessage({
    activeStreamId,
    sessionIds,
    onSessionIdChange: setsessionid,
  });

  return (
    <div
      className="TheDigitalHuman"
      style={{
        bottom: 0,
        [position]: 0,
        width: width,
        height: height,
        position: "absolute",
        background: "transparent",
      }}
    >
      {/* Placeholder shown until the default stream starts playing. */}
      {!peoplestart && (
        <div className="people_loading">
          <TypingIndicator />
          <br />
          数字人已关闭
        </div>
      )}
      {/* Hidden <video> elements, one per stream; the canvas below does the visible rendering. */}
      {Array.from(streams.keys()).map((streamId) => (
        <video
          key={streamId}
          ref={(el) => {
            if (el) videoRefs.current.set(streamId, el);
            else videoRefs.current.delete(streamId);
          }}
          autoPlay
          playsInline
          muted
          style={{ display: "none" }}
        />
      ))}
      <canvas ref={canvasRef} style={{ width: "100%", height: "100%" }} />
      {is_captions && <RollCaptions />}
      {/* Optional text-chat input (enabled via config). */}
      {is_txt && (
        <CurrentInput
          placeholder="请输入消息..."
          onSend={(message) => {
            {/* Label-matching messages are only echoed to captions; others go through the LLM. */}
            const _is = label_data.current.some((item: any) =>
              message.includes(item)
            );
            writeLog("chat", `用户文本输入：${message}`);
            if (_is) {
              StartChat(message);
            } else {
              SendIsLLM("START", message, true);
            }
            window.parent.postMessage({ type: "chat", value: message }, "*");
          }}
          disabled={false}
          isLLMResponding={isLLMResponding}
          onInterrupt={() => SendIsLLM("STOP")}
        />
      )}
    </div>
  );
};

export default TheDigitalHuman;
