// Core AI request logic
import type OpenAI from "openai";
import type { ChatHistoryWin } from "@/types";
import { MODEL_CONFIG } from "@/config/deepseekConfig";
import { useGeneralDataStore, useChatDataStore } from "@/store";
import { useChatSession, useChatScroll } from "@/hooks";

export function useAIStream() {
  const chatDataStore = useChatDataStore();
  const generalDataStore = useGeneralDataStore();
  const { saveCurrentSession } = useChatSession();
  const { isNearBottom } = useChatScroll();
  // Controller for the in-flight streaming request; null when idle.
  const abortController = ref<AbortController | null>(null);

  // True when the thrown value represents a user-initiated abort; such
  // errors must be swallowed, not surfaced as failures. Guards against
  // non-Error throwables (the old `(error as Error).message` cast could
  // itself throw a TypeError here).
  const isAbortError = (error: unknown): boolean =>
    error instanceof Error &&
    (error.name === "AbortError" || error.message.includes("abort"));

  /**
   * Streaming AI request. Writes the assistant reply incrementally into
   * `chatDataStore.chatHistoryWin[params.targetIndex]`.
   *
   * @param params.messages     Conversation history sent to the model.
   * @param params.openai       Configured OpenAI-compatible client.
   * @param params.targetIndex  Index in chatHistoryWin to write the reply to.
   * @param params.model        Model id; defaults to "deepseek-chat".
   * @param params.isRegenerate When true, the new reply is appended to the
   *                            message's regenerateChat list so earlier
   *                            attempts remain browsable.
   * @param params.oldData      The message being regenerated (required when
   *                            isRegenerate is true).
   */
  const handleAIStream = async (params: {
    messages: ChatHistoryWin[];
    openai: OpenAI;
    targetIndex: number;
    model?: string;
    isRegenerate?: boolean;
    oldData?: ChatHistoryWin;
  }) => {
    // Cancel any request still in flight before starting a new one.
    if (abortController.value) {
      abortController.value.abort();
      abortController.value = null;
    }

    // Clear any pending pause from a previous stream.
    generalDataStore.isPause = false;

    // Touch the current session's timestamp on every request.
    const currentSession = chatDataStore.categorizedChats.find(
      (item) => item.thisChats[0]?.id === chatDataStore.currentChatId
    );
    if (currentSession?.thisChats[0]) {
      currentSession.thisChats[0].createdAt = new Date();
    }

    // Prepare the assistant placeholder message. Keep a local reference to
    // THIS call's controller so a later call that replaces
    // abortController.value cannot be clobbered by our `finally` block.
    const messageId = Date.now().toString();
    const controller = new AbortController();
    abortController.value = controller;
    chatDataStore.generatingMessageId = messageId;

    if (params.isRegenerate && params.oldData) {
      // Regeneration: keep earlier attempts in regenerateChat and append
      // the new placeholder as the latest attempt.
      const newMessage = {
        role: "assistant" as const,
        content: "",
        id: messageId,
        isLoading: true,
        isGenerating: true,
      };

      const updatedRegenerateChat = [
        ...(params.oldData.regenerateChat || [params.oldData]),
        newMessage,
      ];

      chatDataStore.chatHistoryWin[params.targetIndex] = {
        ...newMessage,
        regenerateChat: updatedRegenerateChat,
      };

      // Point the active index at the newest attempt.
      generalDataStore.regenerateChatNewIndex =
        updatedRegenerateChat.length - 1;
    } else {
      // Plain generation: replace the slot with a fresh placeholder.
      chatDataStore.chatHistoryWin[params.targetIndex] = {
        role: "assistant",
        content: "",
        id: messageId,
        isLoading: true,
        isGenerating: true,
      };
    }

    try {
      // Spread MODEL_CONFIG *before* `stream: true` so a stray `stream` key
      // in the config can never disable streaming; this ordering also lets
      // the streaming params type check without @ts-ignore.
      const requestConfig: OpenAI.Chat.ChatCompletionCreateParamsStreaming = {
        model: params.model || "deepseek-chat",
        messages: params.messages.map((msg) => ({
          role: msg.role as "user" | "assistant",
          content: msg.content,
        })),
        ...MODEL_CONFIG,
        stream: true,
      };

      const streamRes = await params.openai.chat.completions.create(
        requestConfig,
        { signal: controller.signal }
      );

      if (!streamRes) return;

      // Resolve the message object being generated. For regeneration that
      // is the last entry of regenerateChat, not the top-level slot.
      let generatingMessage = chatDataStore.chatHistoryWin[params.targetIndex];
      if (params.isRegenerate) {
        const regenerateChat =
          chatDataStore.chatHistoryWin[params.targetIndex].regenerateChat;
        generatingMessage =
          regenerateChat?.[regenerateChat.length - 1] || generatingMessage;
      }

      let accumulatedContent = "";
      for await (const chunk of streamRes) {
        if (generalDataStore.isPause) break;

        const content = chunk.choices[0]?.delta?.content || "";
        if (!content) continue;

        // With no container reference yet, default to following the stream.
        const shouldStick =
          !generalDataStore.sessionBottomRef ||
          isNearBottom(generalDataStore.sessionBottomRef);

        generatingMessage.isLoading = false;
        chatDataStore.chatHistoryWin[params.targetIndex].isLoading = false;

        accumulatedContent += content;
        // Mirror the content into both the generating message and the
        // top-level slot so the UI shows progress regardless of which one
        // it renders. (The old code did this via an if/else whose branches
        // were identical.)
        generatingMessage.content = accumulatedContent;
        chatDataStore.chatHistoryWin[params.targetIndex].content =
          accumulatedContent;

        // Only auto-scroll when the user is already near the bottom, and
        // only after the DOM has rendered the new content.
        if (shouldStick) {
          await nextTick();
          generalDataStore.scrollToBottom();
        }
      }

      // Finalize only when the stream was not paused mid-way.
      if (!generalDataStore.isPause) {
        generatingMessage.isGenerating = false;

        if (params.isRegenerate) {
          // Promote the freshly generated attempt to be the visible message,
          // preserving the attempt history.
          chatDataStore.chatHistoryWin[params.targetIndex] = {
            ...generatingMessage,
            regenerateChat:
              chatDataStore.chatHistoryWin[params.targetIndex].regenerateChat,
          };
        } else {
          chatDataStore.chatHistoryWin[params.targetIndex] = {
            ...chatDataStore.chatHistoryWin[params.targetIndex],
            isGenerating: false,
          };
        }

        await saveCurrentSession();
      }
    } catch (error) {
      // Swallow user-initiated aborts; surface everything else.
      if (!isAbortError(error)) {
        console.error("AI请求错误:", error);
        ElMessage({
          showClose: true,
          message: `AI请求错误:${error}`,
          type: "error",
          plain: true,
        });

        const errorText =
          error instanceof Error ? `错误: ${error.message}` : "请求失败";

        // Write the error into the top-level message and, when
        // regenerating, also into the latest regenerate attempt.
        const mainMessage = chatDataStore.chatHistoryWin[params.targetIndex];
        mainMessage.content = errorText;

        let generatingMessage = mainMessage;
        if (params.isRegenerate) {
          const regenerateChat = mainMessage.regenerateChat;
          generatingMessage =
            regenerateChat?.[regenerateChat.length - 1] || mainMessage;
          generatingMessage.content = errorText;
        }
        generatingMessage.isGenerating = false;
        generatingMessage.isLoading = false;
      }
    } finally {
      // Only clear the shared ref if it still points at OUR controller —
      // a newer call may have installed its own by now.
      if (abortController.value === controller) {
        abortController.value = null;
      }
      // generatingMessageId is intentionally NOT cleared so the
      // "continue generating" button can appear.

      // Safety net: make sure no message is left flagged as generating.
      const genIndex = chatDataStore.chatHistoryWin.findIndex(
        (msg) => msg.isGenerating
      );
      if (genIndex !== -1) {
        chatDataStore.chatHistoryWin[genIndex].isGenerating = false;
        chatDataStore.chatHistoryWin[genIndex].isLoading = false;
      }

      generalDataStore.scrollToBottom();
    }
  };

  /** Abort the in-flight streaming request, if any. */
  const abortRequest = () => {
    if (abortController.value) {
      abortController.value.abort();
      abortController.value = null;
    }
  };

  return {
    abortController,
    handleAIStream,
    abortRequest,
  };
}
