using Microsoft.AspNetCore.SignalR;
using MultiAgentsClient.ChatDataStructures;
using MultiAgentsClient.Shared;
using MultiAgentsServer.Scripts.Hubs;
using MultiAgentsShared;
using OpenAI.ObjectModels.RequestModels;

namespace MultiAgentsServer.Scripts.Services;

/// <summary>
/// One chunk of a streamed LLM response: a fragment of message text, a batch
/// of tool calls, or both. Streams signal completion by yielding a null slice
/// (see <see cref="LLmComService{TMessage}.RequestChatAsStreamFake"/>).
/// </summary>
public class LlmStreamSlice(int sliceIndex)
{
    // Ordinal position of this slice within its stream (0-based).
    public int sliceIndex = sliceIndex;
    // Text fragment carried by this slice; null when the slice has no text.
    public string? message = null;
    // Tool calls emitted with this slice; null when the slice has none.
    public List<ToolCall>? toolCallsOut = null;
}

/// <summary>
/// Event-bus payload requesting a plain (non-tool-call) chat completion from
/// an LLM service; the response is pushed back to the client over SignalR
/// via <see cref="SignalRWorkflowHub.LlmNodeResponse"/>.
/// </summary>
public class ServerNodeLlmRequest
{
    // Workflow session this request belongs to.
    public string SessionId = "";
    // Workflow node that issued the request; echoed back with the response.
    public string NodeId = "";
    // SignalR connection id of the client that should receive the response.
    public string ConnectionId = "";
    // Conversation history to send to the model.
    public List<LlmMessageSimp> messages = [];
    // Model name; must appear in the handling service's ModelList.
    public string Model = "";
    // Sampling temperature passed through to the provider.
    // NOTE(review): defaults to 0 when unset — confirm senders always assign it.
    public float temperature;
}

/// <summary>
/// Base class for LLM chat-completion services. Subclasses provide the
/// provider-specific message type <typeparamref name="TMessage"/> and the
/// concrete request implementations; this base wires up the event-bus
/// subscription and the shared concurrency limiter.
/// </summary>
/// <typeparam name="TMessage">Provider-specific outgoing chat message type.</typeparam>
public abstract class LLmComService<TMessage>
{
    // Number of characters emitted per slice by the fake streamer below.
    private const int FakeSliceSize = 5;

    /// <summary>Models this service can serve; used to filter bus requests.</summary>
    public abstract List<string> ModelList { get; }

    /// <summary>
    /// Limits concurrent outbound LLM requests. Exposed to subclasses, which
    /// are expected to acquire/release it around their provider calls.
    /// NOTE(review): this base class never acquires it itself — confirm that
    /// every RequestChat implementation does.
    /// </summary>
    protected readonly SemaphoreSlim Semaphore;

    protected IEventBus EventBus;

    /// <summary>
    /// Reads "MaxConcurrentRequests" from configuration and subscribes to
    /// <see cref="ServerNodeLlmRequest"/> events for models in <see cref="ModelList"/>.
    /// </summary>
    public LLmComService(IEventBus eventBusIn, IHubContext<SignalRWorkflowHub> hubContext, IConfiguration configuration)
    {
        EventBus = eventBusIn;

        // Convert.ToInt32(section.Value) previously returned 0 for a missing
        // key (a 0-count semaphore silently blocks every request forever) and
        // threw FormatException for a malformed one. Parse defensively and
        // enforce a minimum of 1.
        string? configuredLimit = configuration.GetSection("MaxConcurrentRequests").Value;
        if (!int.TryParse(configuredLimit, out int maxConcurrentRequests) || maxConcurrentRequests < 1)
        {
            maxConcurrentRequests = 1;
        }
        Semaphore = new SemaphoreSlim(maxConcurrentRequests);

        EventBus.Subscribe<ServerNodeLlmRequest>(cmd =>
        {
            // Fire-and-forget so the bus handler returns immediately.
            Task.Run(async () =>
            {
                // Other services subscribed to the same event handle models
                // not in this service's list.
                if (!ModelList.Contains(cmd.Model))
                {
                    return;
                }

                try
                {
                    List<TMessage> outGoingMsgs = MessageConversion(cmd.messages);
                    LlmStreamSlice? requestReturnVal = await RequestChat(outGoingMsgs, cmd.Model, cmd.temperature);
                    await SignalRWorkflowHub.LlmNodeResponse(hubContext, cmd.ConnectionId, cmd.NodeId, requestReturnVal?.message ?? "");
                }
                catch (Exception)
                {
                    // Previously an exception here was unobserved inside
                    // Task.Run and the client was left waiting forever.
                    // Reply with an empty message (same fallback the success
                    // path uses for a null result) so the caller unblocks.
                    // NOTE(review): wire in logging when available.
                    await SignalRWorkflowHub.LlmNodeResponse(hubContext, cmd.ConnectionId, cmd.NodeId, "");
                }
            });
        });
    }

    /// <summary>
    /// Deterministic stand-in for a streaming chat call, for testing the
    /// streaming pipeline without hitting a provider. Emits a canned message
    /// in <see cref="FakeSliceSize"/>-character slices, then one slice carrying
    /// a fake tool call, then a null slice as the end-of-stream marker.
    /// Parameters are intentionally ignored.
    /// </summary>
    public async IAsyncEnumerable<LlmStreamSlice?> RequestChatAsStreamFake(List<ChatMessageData> _, string __, float ___, List<ToolDefinition>? ____ = null)
    {
        string fakeMessage = "I'm glad you provided the system information!\r\n\r\nAs we work together in a team, I'll follow the working principle:\r\n\r\nStrategic Thinking Before Action:\r\nBefore making any function calls, I'll share my thought process with the team. For example, if I need to perform some operation on a file, I might think: \"Hey team, before executing this command, I want to make sure that it's safe to do so without affecting other parts of the system. Let me check the current status by running `git status` and verifying that there are no uncommitted changes. If everything looks good, I'll proceed with the function call.\"\r\nExpected outcomes: Successful execution of the function call without any unexpected consequences.\r\n\r\nProactive Engagement:\r\nWhen a function call is successfully executed, I won't wait for explicit instructions for the next step. Instead, I'll initiate a relevant, subsequent function call and explain my reasoning to the team.\r\nFor example: \"Hey team, since we just updated the file, let's run `git add` to stage the changes and then follow up with `git commit -m 'Updated file'`. This way, we can ensure that our changes are properly tracked in the version control system.\"\r\n\r\nReasoning: I'm making these subsequent function calls because they logically follow from the previous one, and it's more efficient to do so rather than asking for explicit instructions each time.\r\n\r\nAssumptions: None! I'll always verify the current status using `git` or other command-lines before responding. If there are any doubts or uncertainties, I'll clarify them with the team before taking any further actions.\r\n\r\nLet me know if this aligns with your expectations, and we can continue working together effectively!";

        int length = fakeMessage.Length;
        int index = 0;

        while (index < length)
        {
            // Small delay per slice to mimic network streaming cadence.
            await Task.Delay(15);

            int sliceLength = Math.Min(FakeSliceSize, length - index);
            yield return new LlmStreamSlice(index / FakeSliceSize)
            {
                message = fakeMessage.Substring(index, sliceLength),
            };

            index += FakeSliceSize;
        }

        // Final content slice: a canned tool-call request.
        await Task.Delay(15);
        yield return new LlmStreamSlice(index / FakeSliceSize)
        {
            message = "<ToolCalls>",
            toolCallsOut = [new ToolCall() {
                Id = "aa",
                Index = 100,
                Type = "type",
                FunctionCall = new FunctionCall(){
                    Name = "ExecuteCommand",
                    Arguments = "dir C:\\",
                }
            }],
        };

        // Null slice = end-of-stream marker, matching the real stream contract.
        await Task.Delay(15);
        yield return null;
    }

    /// <summary>Converts simplified messages into the provider-specific type.</summary>
    protected abstract List<TMessage> MessageConversion(List<LlmMessageSimp> llmMsgsSimp);

    /// <summary>Streams a chat completion slice by slice; a null slice ends the stream.</summary>
    public abstract IAsyncEnumerable<LlmStreamSlice?> RequestChatAsStream(List<ChatMessageData> messages, string model, float temperature, List<ToolDefinition>? toolSet = null);

    /// <summary>Runs a complete (non-streamed) chat request.</summary>
    public abstract Task<LlmStreamSlice?> RequestChat(List<TMessage> messages, string model, float temperature, List<ToolDefinition>? toolSet = null);

    /// <summary>
    /// Convenience wrapper: converts simplified messages and runs a
    /// non-streamed chat request.
    /// </summary>
    public async Task<LlmStreamSlice?> RequestChatSimp(List<LlmMessageSimp> msgsSimp, string model, float temp = 1.0f)
    {
        List<TMessage> chatMsgs = MessageConversion(msgsSimp);
        return await RequestChat(chatMsgs, model, temp);
    }

    /// <summary>
    /// Maps the requested model name to the one actually sent to the provider.
    /// NOTE(review): semantics defined by subclasses — presumably a substitution
    /// hook; confirm against implementations.
    /// </summary>
    protected abstract string ModelCheater(string requestedModel);
}