package com.llmops.demo;

import com.llmops.core.Context;
import com.llmops.core.Edge;
import com.llmops.core.WorkflowGraph;
import com.llmops.core.node.LLMNode;
import com.llmops.core.node.EndNode;
import com.llmops.core.node.StartNode;

import java.util.Collections;

/**
 * Demo driver: assembles a minimal three-node workflow
 * (start -&gt; LLM -&gt; end) and executes it with a session context.
 */
public class Main {

    public static void main(String[] args) {
        WorkflowGraph graph = new WorkflowGraph("1");

        // Shared execution context; carries a session identifier for the run.
        Context ctx = new Context();
        ctx.setVariable("sessionId", 3);

        // Start node supplies the user input; the LLM node's prompt references it
        // via the {data_input_output} placeholder (presumably substituted at
        // execution time by the engine — confirm against LLMNode's implementation).
        graph.addNode(new StartNode(
                "data_input", "data_input",
                Collections.singletonMap("input", "你是谁")));
        graph.addNode(new LLMNode(
                "llm_inferenceA", "llm_inferenceA",
                Collections.singletonMap("prompt", "你是聊天助手回复我的问题:{data_input_output}")));
        graph.addNode(new EndNode(
                "result_exporterA", "result_exporterA",
                Collections.singletonMap("v", "A")));

        // Linear wiring: data_input -> llm_inferenceA -> result_exporterA.
        graph.addEdge(new Edge("data_input", "llm_inferenceA"));
        graph.addEdge(new Edge("llm_inferenceA", "result_exporterA"));

        // Check graph structure first, then run it against the context.
        graph.validate();
        graph.execute(ctx);

        System.out.println("Workflow execution completed");
    }
}