import { HumanMessage, SystemMessage } from "@langchain/core/messages";
import { PromptTemplate } from "@langchain/core/prompts";
import { IWorkflowNode } from "../IWorkflowNode";
import { LLMProvider } from "../../llms/modelProvider";
import { ILLMModelChunk, LLMModel } from "../../llms/llmModel";
import { SendMessageData } from "../sendMessageData";
import { baseInputVars } from "./baseInputVars";
import { DBService } from "../../services/dbService";
import { BaseNode } from "./baseNode";

export class LLMNode extends BaseNode {
    /** Optional callback used to forward streamed chunks/messages to the host. */
    OnMessage: (sender: IWorkflowNode, args: SendMessageData) => void;
    name: string;
    workflowID: string;
    /** When true, streamed chunks are not forwarded through OnMessage. */
    disableMessage = false;
    systemPrompt?: string;
    userMessages?: string[];
    inputVars = baseInputVars;
    model: LLMModel = LLMProvider.current.ZiJieDeepSeekV3;
    protected storeMsgSvr = DBService.StoreMessageService;
    /** When true, init() skips prompt-template variable substitution. */
    protected withOutPromptFormat = false;


    /** Key under which this node writes its result into outputVars.State. */
    get ResultField() {
        return this.name + "Result";
    }

    /** The last run's full model response, read back from outputVars.State. */
    get NodeDefaultResult() {
        return this.outputVars.State[this.ResultField] as string;
    }

    /** Formats system and user prompts unless formatting is disabled. */
    private async init() {
        if (this.withOutPromptFormat) {
            return;
        }
        await this.formatSystemPrompt();
        await this.formatUserPrompts();
    }

    /**
     * Extracts single-brace template variable names ({var}) from a prompt,
     * ignoring escaped double-brace placeholders ({{...}}).
     */
    private takeVarsDefine(userPrompt: string) {
        const regex = /\{[^}]+\}/g;
        const result = userPrompt.match(regex) || [];
        return result.filter((x: string) => {
            if (x.startsWith("{{")) {
                return false;
            }
            return true;
        }).map(x => {
            return x.replace(/[{}]/g, "");
        })
    }

    /**
     * Shared helper: substitutes known input variables into a prompt template.
     * Returns the raw template untouched when none of its variables are
     * present in inputVars.State.
     */
    private async formatPromptTemplate(template: string): Promise<string> {
        const varDefines = this.takeVarsDefine(template);
        const prompt = new PromptTemplate({
            template: template,
            inputVariables: varDefines
        });
        // Typed explicitly so index assignment compiles under "strict".
        const newInputVars: Record<string, unknown> = {};
        varDefines.forEach(x => {
            if (this.inputVars.State[x] != undefined) {
                newInputVars[x] = this.inputVars.State[x];
            }
        });
        if (Object.keys(newInputVars).length > 0) {
            return await prompt.format(newInputVars);
        }
        else {
            return template;
        }
    }

    private async formatSystemPrompt() {
        if (this.systemPrompt) this.systemPrompt = await this.formatPromptTemplate(this.systemPrompt);
    }

    /**
     * Formats every user message through the prompt template.
     * FIX: the previous forEach(async ...) never awaited the formatting and
     * only reassigned the loop parameter, so the formatted results were
     * silently discarded; Promise.all + write-back makes it take effect.
     */
    private async formatUserPrompts() {
        if (this.userMessages) {
            this.userMessages = await Promise.all(
                this.userMessages.map(item => this.formatPromptTemplate(item))
            );
        }
    }

    /** Publishes the model's final response into outputVars and fires OnRunEnd. */
    private afterRunOutput() {
        this.outputVars.State = {
            [this.ResultField]: this.model.lastFullResponse
        };
        this.OnRunEnd();
        return this.outputVars.State;
    }

    /** Validates required configuration before a run. */
    private check() {
        if (!this.name) {
            throw new Error("name is required");
        }

        if (!this.model) {
            throw new Error("model is required");
        }
    }

    /** Forwards one streamed model chunk to OnMessage unless messaging is disabled. */
    protected OnModelStreamOut(chunk: ILLMModelChunk) {
        if (this.disableMessage) {
            return;
        }
        this.OnMessage?.(this, new SendMessageData(this.inputVars.State.convID as string, this.name, "chat", chunk));
    }

    /** Hook called just before the model is invoked; override in subclasses. */
    protected OnRunBegin() {

    }

    /** Hook called after the run result is published; override in subclasses. */
    protected OnRunEnd() {

    }

    /**
     * Runs the node: validates config, formats prompts, streams the model
     * response (forwarding chunks via OnMessage) and returns the output state.
     * Rethrows any model or formatting error after logging the node name.
     */
    async Run() {
        try {
            this.check();
            await this.init();
            this.OnRunBegin();

            const msg = [
                this.systemPrompt ? new SystemMessage(this.systemPrompt) : new SystemMessage("")
            ];

            if (this.userMessages) {
                msg.push(...this.userMessages.map(x => {
                    return new HumanMessage(x)
                }));
            }

            // FIX: attach the stream handler BEFORE starting the stream, so
            // chunks emitted during startup cannot be lost.
            if (this.OnMessage) {
                this.model.OnStreamOut = this.OnModelStreamOut.bind(this);
            }
            const output = await this.model.stream(msg);
            // Drain the stream; delivery to the UI happens via OnStreamOut.
            for await (const chunk of output) {
            }
            this.usage.add(this.model.totalUsage);
            return this.afterRunOutput();
        } catch (error) {
            console.error("error Run: ", this.name);
            throw error;
        }
    }

    /** Requests the model to abort the in-flight stream. */
    Stop() {
        this.model.isAbort = true;
    }
}