import { computed, ref } from 'vue';
import { Ollama } from "@langchain/community/llms/ollama";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { StringOutputParser } from "@langchain/core/output_parsers";
import { RunnableSequence } from "@langchain/core/runnables";
import { useChatMessageStore } from '@/stores/index'
import type { RunnableConfig } from "@langchain/core/runnables";

import type { ChatMessage } from '@/type/index'

// Pinia store accessors shared by every useChatService() instance.
// NOTE(review): calling useChatMessageStore() at module load can execute before
// Pinia is installed on the app instance (a documented Pinia pitfall); consider
// moving this call inside useChatService() — verify against the app bootstrap order.
// NOTE(review): addChatMessage is destructured but never used in this file.
const { getChatMessage, addChatMessage } = useChatMessageStore()

/**
 * Chat composable backed by a local Ollama model via LangChain.
 *
 * Exposes the reactive chat history (sourced from the Pinia store), a loading
 * flag, the currently-streaming partial response, and `chat`/`stopChat`
 * controls. History is mirrored to localStorage after every mutation.
 */
export function useChatService() {
    // History lives in the Pinia store; `computed` keeps this view reactive.
    const chatHistory = computed(() => getChatMessage());
    const isLoading = ref(false);
    // Partial model output while a response is streaming; cleared afterwards.
    const currentResponse = ref('');
    let abortController: AbortController | null = null;

    const ollama = new Ollama({
        baseUrl: "http://localhost:11434",
        model: "qwen2",
    });

    const chatPrompt = ChatPromptTemplate.fromMessages([
        ["system", "You are a helpful AI assistant."],
        ["user", "{input}"],
    ]);

    // prompt -> LLM -> plain string chunks
    const chain = RunnableSequence.from([
        chatPrompt,
        ollama,
        new StringOutputParser(),
    ]);

    // Collision-resistant message id. Falls back to Math.random-derived ids
    // where crypto.randomUUID is unavailable (non-secure contexts, old runtimes).
    const makeId = (): string =>
        typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function'
            ? crypto.randomUUID()
            : Math.random().toString(36).slice(2);

    // Append one message to the history and persist immediately.
    const pushMessage = (role: ChatMessage['role'], content: string): void => {
        chatHistory.value.push({ role, content, created_at: Date.now(), id: makeId() });
        saveChatHistory(chatHistory.value);
    };

    /**
     * Send `input` to the model, streaming the reply into `currentResponse`
     * and appending both sides of the exchange to the history.
     * Whitespace-only input is rejected (previously only falsy input was).
     */
    async function chat(input: string): Promise<void> {
        if (!input?.trim()) {
            console.error("请输入内容");
            return;
        }
        isLoading.value = true;
        currentResponse.value = '';
        pushMessage('user', input);
        // Flatten the whole history into one prompt string so the model sees context.
        const fullInput = chatHistory.value
            .map(msg => `${msg.role.toUpperCase()}: ${msg.content}`)
            .join('\n');

        try {
            abortController = new AbortController();
            // NOTE(review): newer RunnableConfig versions declare `signal`
            // directly; the intersection cast keeps older typings compiling.
            const stream = await chain.stream(
                { input: fullInput },
                { signal: abortController.signal } as RunnableConfig & { signal: AbortSignal },
            );

            for await (const chunk of stream) {
                currentResponse.value += chunk;
            }

            pushMessage('ai', currentResponse.value.trim());
        } catch (error: unknown) {
            if (error instanceof Error) {
                if (error.name === 'AbortError') {
                    // Expected path when stopChat() aborts the stream.
                    console.log('Request was aborted');
                } else {
                    console.error("Error during chat:", error.message);
                }
            } else {
                console.error("An unknown error occurred:", error);
            }
        } finally {
            isLoading.value = false;
            currentResponse.value = '';
            abortController = null;
        }
    }

    /**
     * Abort an in-flight request. Persists the partial answer only when
     * something was actually streamed (previously an empty AI message could
     * be pushed and saved).
     */
    function stopChat(): void {
        if (!abortController) return;
        const partial = currentResponse.value.trim();
        if (partial) {
            pushMessage('ai', partial);
        }
        abortController.abort();
        isLoading.value = false;
        currentResponse.value = '';
    }

    // Mirror the full history to localStorage. Parameter renamed so it no
    // longer shadows the composable-level `chatHistory` computed.
    const saveChatHistory = (messages: ChatMessage[]): void => {
        localStorage.setItem('chatMessage', JSON.stringify(messages));
    };

    return {
        chatHistory,
        isLoading,
        currentResponse,
        chat,
        stopChat
    };
}