import { Annotation } from "@langchain/langgraph";
import { TokenUsage } from "../../llms/modelUsage";
import { IWorkflowNode, IWorkflowNodeEndEventArgs } from "../IWorkflowNode";
import { SendMessageData } from "../sendMessageData";
import { baseInputVars } from "./baseInputVars";
import { ILLMModelChunk } from "../../llms/llmModel";
import { DBService } from "../../services/dbService";

/**
 * Base class for workflow nodes: holds shared state channels, token-usage
 * accounting, and the callbacks the workflow host uses to receive messages
 * and completion events from a node.
 */
export abstract class BaseNode implements IWorkflowNode {
    // Raised when the node emits a streamed message toward a receiver.
    // Assigned externally by the workflow host, hence the definite-assignment
    // assertion (`!`) — required under strictPropertyInitialization.
    // NOTE(review): confirm the host always wires these before Run() executes.
    OnMessage!: (sender: IWorkflowNode, args: SendMessageData) => void;
    // Raised when the node finishes executing.
    OnEnd!: (sender: IWorkflowNode, args: IWorkflowNodeEndEventArgs) => void;

    /** Hook for subclasses to perform extra setup during construction. */
    protected OnInit() {
    }

    constructor() {
        // Seed the shared workflow state with empty identifiers; real values
        // are expected to be filled in before the node runs.
        // Class-field initializers run before the constructor body, so
        // `inputVars` is already populated here even though it is declared
        // textually below this constructor.
        this.inputVars.State = {
            convID: "",
            userID: "",
            userInput: ""
        }
        this.OnInit?.();
    }

    // Assigned by the owning workflow after construction (`!` — see above).
    workflowID!: string;
    /** Unique node name; also used to derive ResultField. */
    abstract name: string;
    // NOTE(review): property name is a typo for "canEdit", kept for backward
    // compatibility with existing callers; prefer the `canEdit` accessor below.
    canEidt: boolean = true;
    /** Correctly-spelled alias for the legacy `canEidt` flag. */
    get canEdit(): boolean {
        return this.canEidt;
    }
    set canEdit(value: boolean) {
        this.canEidt = value;
    }
    /** State key under which this node's result is stored: `<name>Result`. */
    get ResultField() {
        return this.name + "Result";
    }
    usage?: TokenUsage = new TokenUsage();
    // LangGraph state channels shared by all nodes (spread from baseInputVars).
    inputVars = Annotation.Root({
        ...baseInputVars.spec
    })
    outputVars = Annotation.Root({});
    protected storeMsgSvr = DBService.StoreMessageService;
    /** Executes the node's work; implemented by concrete subclasses. */
    abstract Run(): Promise<any>;
    /**
     * Forwards one model stream chunk to the host via OnMessage.
     * @param chunk    streamed model output fragment
     * @param receiver UI channel the chunk is destined for (defaults to "chat")
     */
    protected OnModelStreamOut(chunk: ILLMModelChunk, receiver: "chat" | "editor" | "chat_think" = "chat") {
        this.OnMessage?.(this, new SendMessageData(this.inputVars.State.convID, this.name, receiver, chunk));
    }

    /** Default no-op; subclasses override to cancel in-flight work. */
    Stop(): void {

    }
}