using System.Text.Json;
using MultiAgentsClient.Service.WorkflowEngine.BaseNodes;
using MultiAgentsClient.Shared;
using MultiAgentsShared;
using OpenAI.ObjectModels;
using static MultiAgentsServer.Scripts.Shared.ModelConsts;

namespace MultiAgentsClient.Service.WorkflowEngine.BuiltInNodes;

[NodeGraphPortDef(1, 1)]
public class StrToListNode : ServiceDependentNode<NodeLlmRequest, NodeLlmResponse>
{
    // User-supplied instruction describing how items should be extracted from the input.
    [NodeGraphParam] public string howToExtract;
    // Raw message text the LLM extracts the list from.
    [NodeGraphParam] public string inputString;

    // Which LLM model handles the extraction request.
    [NodeGraphParam] public LlmModels ModelEnum;
    // Extraction result; guaranteed non-null after the response handler runs
    // (falls back to an empty list when the LLM reply is not a valid JSON array).
    [NodeGraphOutput] public List<string> listOfString;

    // System prompt: instructs the model to answer with a bare JSON array of strings.
    // NOTE: runtime prompt text — kept byte-identical; changing it changes model behavior.
    private string strToListInstruction => "Your job to turn a input string into a list of strings, based on the extraction instruction that users wants. \n" +
        "Its very important that its in json format, e.g.:\n" +
        "[\r\n" +
        "  \"Item 1\",\r\n" +
        "  \"Item 2\",\r\n" +
        "  \"Item 3\",\r\n" +
        "  \"Item 4\"\r\n" +
        "]\r\n";

    /// <summary>
    /// Builds the system + user message pair, resolves runtime variables, and
    /// publishes a <see cref="NodeLlmRequest"/> on the event bus.
    /// </summary>
    protected override void SendRequestToService()
    {
        var msgsSimp = new List<LlmMessageSimp>()
        {
            new LlmMessageSimp(StaticValues.ChatMessageRoles.System, strToListInstruction),
            new LlmMessageSimp(StaticValues.ChatMessageRoles.User,
            $"Here is how user would like you to extract list of string from input\n:" +
            $"{howToExtract}\n" +
            $"And Here is the input string:\n" +
            $"{inputString}")
        };

        // Substitute any node-graph variables referenced inside the message bodies.
        List<LlmMessageSimp> messagesRuntime = InsertVariable(msgsSimp);

        var nodeLlmRequest = new NodeLlmRequest(Id)
        {
            temperature = 1.0f,
            gptModel = modelInfos[ModelEnum].ModelName,
            // Messages are transported as JSON strings, one per message.
            messages = messagesRuntime.ConvertAll(SerializableObjectBase.ConvertToJson)
        };

        EventBus.Publish(nodeLlmRequest);
    }

    /// <summary>
    /// Parses the LLM reply as a JSON array of strings and exposes it via
    /// <see cref="listOfString"/>. Parse failures degrade to an empty list so
    /// the node still completes with <see cref="NodeOutcomeStatus.Success"/>.
    /// </summary>
    protected override NodeOutcome ServiceResponseHandler(NodeLlmResponse cmd)
    {
        Debug.LogLine("StrToListNodeResponse:" + cmd.llmResult);
        try
        {
            // Deserialize returns null for the JSON literal "null"; coalesce so
            // downstream consumers never observe a null listOfString.
            listOfString = JsonSerializer.Deserialize<List<string>>(cmd.llmResult) ?? new List<string>();
        }
        catch (Exception ex)
        {
            // LLM output is untrusted and may not be valid JSON (e.g. wrapped in
            // prose or code fences); log the reason instead of a bare "Failed".
            Debug.LogError("StrToListNode: could not parse LLM result as a JSON string array: " + ex.Message);
            listOfString = new List<string>();
        }
        // Publish the (possibly empty) result exactly once, for either branch.
        runtimeContext.SetVar(Id, () => listOfString);
        return new NodeOutcome(NodeOutcomeStatus.Success, 0);
    }
}
