using LibGit2Sharp;
using Microsoft.AspNetCore.SignalR;
using MultiAgentsClient.ChatDataStructures;
using MultiAgentsClient.Shared;
using MultiAgentsServer.Scripts.Hubs;
using MultiAgentsShared;
using OpenAI.ObjectModels.RequestModels;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using static MultiAgentsServer.Scripts.Shared.ModelConsts;

namespace MultiAgentsServer.Scripts.Services;


/// <summary>
/// LLM communication service backed by a locally hosted Ollama server.
/// Supports streaming chat (with tool calls merged in from a parallel
/// non-streaming request) and non-streaming chat, and maps logical model
/// names onto concrete Ollama model tags.
/// </summary>
public class OllamaComService(IEventBus eventBusIn, IHubContext<SignalRWorkflowHub> hubContext, IConfiguration configuration) : LLmComService<ChatMessageData>(eventBusIn, hubContext, configuration)
{
    // Ollama chat endpoint. NOTE(review): host/port are hard-coded; consider
    // reading from IConfiguration if Ollama ever runs on another machine.
    private const string ChatEndpoint = "http://localhost:11434/api/chat";

    // One shared client instead of a new HttpClient per request, which risks
    // socket exhaustion under load (HttpClient is safe for concurrent use).
    private static readonly HttpClient SharedHttpClient = new();

    /// <summary>Logical model names this service advertises to callers.</summary>
    public override List<string> ModelList => [
        LlmModels.Llama3_70b.ToString(),
        LlmModels.Qwen2_72b.ToString(),
        LlmModels.Qwen2_72b_instruct.ToString(),
    ];

    /// <summary>
    /// Maps a logical model name to the concrete Ollama model tag. LOCALDEV
    /// builds substitute smaller models so development machines can run them.
    /// Returns an empty string for unrecognized models.
    /// </summary>
    protected override string ModelCheater(string requestedModel)
    {
        var usingModel = "";
        if (requestedModel == LlmModels.Llama3_70b.ToString())
        {
#if LOCALDEV
            usingModel = "llama3.1:8b";
#else
            usingModel = "llama3.1:70b";
#endif
        }
        else if (requestedModel == LlmModels.Qwen2_72b.ToString())
        {
#if LOCALDEV
            usingModel = "qwen2:latest";
#else
            usingModel = "qwen2:72b";
#endif
        }
        else if (requestedModel == LlmModels.Qwen2_72b_instruct.ToString())
        {
#if LOCALDEV
            usingModel = "qwen2:latest";
#else
            usingModel = "qwen2:72b-instruct";
#endif
        }

        Debug.LogLine($"RequestedModel:{requestedModel}, UsingModel:{usingModel}\r");
        return usingModel;
    }

    // Flattens chat messages (text, attached text files, base64 images) into the
    // shape the Ollama /api/chat endpoint expects. Declared List<object> so both
    // request builders can share it; System.Text.Json serializes each element's
    // runtime (anonymous) type.
    // BUGFIX: the non-streaming path previously emitted the dictionary key (an
    // int index) as "File name" instead of the actual file name — both paths now
    // use file.Value.fileName, matching the streaming path's original behavior.
    private static List<object> FormatMessages(List<ChatMessageData> messages)
    {
        return messages.Select(m =>
        {
            StringBuilder fileContent = new();

            foreach (KeyValuePair<int, (string fileName, string fileContent)> file in m.textFiles)
            {
                fileContent.Append("\rFile name: ").Append(file.Value.fileName)
                           .Append("\rFile content: ").Append(file.Value.fileContent);
            }

            List<byte[]> images = m.images.Select(image => Convert.FromBase64String(image.Value.fileContent)).ToList();

            return (object)new
            {
                m.role,
                content = m.content + fileContent.ToString(),
                images,
            };
        }).ToList();
    }

    // Projects OpenAI-style tool definitions into Ollama's function-calling
    // schema. Returns null when no tool set was supplied (serialized as
    // "tools": null, matching the previous behavior).
    private static List<object>? FormatTools(List<ToolDefinition>? toolSet)
    {
        return toolSet?.Select(tool => (object)new
        {
            type = "function",
            function = new
            {
                name = tool.Function.Name,
                description = tool.Function.Description,
                parameters = new
                {
                    type = tool.Function.Parameters.Type,
                    properties = tool.Function.Parameters.Properties.ToDictionary(
                        param => param.Key,
                        param => new
                        {
                            type = param.Value.Type,
                            description = param.Value.Description
                        }
                    ),
                }
            }
        }).ToList();
    }

    /// <summary>
    /// Streams a chat completion from Ollama, yielding one slice per streamed
    /// chunk and a null sentinel when the stream is done. When tools are
    /// requested, a parallel non-streaming request is started up front and its
    /// tool calls are yielded just before the final sentinel (the streaming
    /// endpoint is not used for tool calls here).
    /// </summary>
    public override async IAsyncEnumerable<LlmStreamSlice?> RequestChatAsStream(List<ChatMessageData> messages, string requestedModel, float temperature, List<ToolDefinition>? toolSet = null)
    {
        // Typed task instead of the previous `Task?` + `as Task<LlmStreamSlice?>`
        // cast, which could silently yield null and then NRE on the result.
        Task<LlmStreamSlice?>? noStreamRequestForToolCall = null;
        if (toolSet != null && toolSet.Count != 0)
        {
            noStreamRequestForToolCall = RequestChat(messages, requestedModel, temperature, toolSet);
        }

        int sliceCounter = 0;

        var requestBody = new
        {
            model = ModelCheater(requestedModel),
            messages = FormatMessages(messages),
            stream = true,
            keep_alive = "1h",
            options = new
            {
                temperature,
            },
            tools = FormatTools(toolSet),
        };

        string jsonRequest = JsonSerializer.Serialize(requestBody);

        using HttpResponseMessage response = await SharedHttpClient.SendAsync(
            new HttpRequestMessage(HttpMethod.Post, ChatEndpoint)
            {
                Content = new StringContent(jsonRequest, Encoding.UTF8, "application/json")
            },
            // Start reading as soon as headers arrive so chunks stream through.
            HttpCompletionOption.ResponseHeadersRead
        );

        if (!response.IsSuccessStatusCode)
        {
            Debug.LogWarning($"Failed to get response. Status code: {response.StatusCode}.");
            yield break;
        }

        using Stream responseStream = await response.Content.ReadAsStreamAsync();
        using StreamReader reader = new(responseStream);

        // Ollama streams newline-delimited JSON objects, one per chunk.
        string? line;
        while ((line = await reader.ReadLineAsync()) != null)
        {
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            ChatResponse? jsonResponse = JsonSerializer.Deserialize<ChatResponse>(line);
            if (jsonResponse == null)
            {
                continue;
            }

            if (!jsonResponse.done)
            {
                yield return new LlmStreamSlice(sliceCounter++)
                {
                    message = jsonResponse.message.content
                };
            }
            else
            {
                if (noStreamRequestForToolCall != null)
                {
                    // Merge tool calls from the parallel non-streaming request.
                    // Guarded: that request returns null on HTTP failure.
                    LlmStreamSlice? llmStreamSlice = await noStreamRequestForToolCall;
                    if (llmStreamSlice != null)
                    {
                        yield return new LlmStreamSlice(sliceCounter++)
                        {
                            toolCallsOut = llmStreamSlice.toolCallsOut
                        };
                    }
                }

                // Null sentinel signals end-of-stream to consumers.
                yield return null;
            }
        }
    }

    /// <summary>
    /// Sends a single non-streaming chat request to Ollama and returns the
    /// full result (message text plus any tool calls), or null when the HTTP
    /// request fails or the response cannot be deserialized.
    /// </summary>
    public override async Task<LlmStreamSlice?> RequestChat(List<ChatMessageData> messages, string requestedModel, float temperature, List<ToolDefinition>? toolSet = null)
    {
        var requestBody = new
        {
            model = ModelCheater(requestedModel),
            messages = FormatMessages(messages),
            stream = false,
            keep_alive = "1h",
            options = new
            {
                temperature,
            },
            tools = FormatTools(toolSet),
        };

        string jsonRequest = JsonSerializer.Serialize(requestBody);

        StringContent content = new(jsonRequest, Encoding.UTF8, "application/json");

        using HttpResponseMessage response = await SharedHttpClient.PostAsync(ChatEndpoint, content);

        if (!response.IsSuccessStatusCode)
        {
            Debug.LogError($"Failed to get response. Status code: {response.StatusCode}");
            return null;
        }

        string responseContent = await response.Content.ReadAsStringAsync();

        // Ollama returns "arguments" as a JSON object, but ChatResponse/ToolCall
        // expect a string — rewrite each object into an escaped embedded string.
        // NOTE(review): the non-greedy {.*?} truncates argument objects containing
        // nested braces; confirm tool arguments stay flat or use a balanced matcher.
        string pattern = @"""arguments"":\s*({.*?})";

        responseContent = Regex.Replace(responseContent, pattern, match =>
        {
            string matchedArguments = match.Groups[1].Value;
            string argumentsAsString = JsonSerializer.Serialize(JsonDocument.Parse(matchedArguments).RootElement);
            argumentsAsString = argumentsAsString.Replace("\\", "\\\\").Replace("\"", "\\\"");
            return "\"arguments\": \"" + argumentsAsString + "\"";
        });

        ChatResponse? jsonResponse = JsonSerializer.Deserialize<ChatResponse>(responseContent);
        if (jsonResponse == null)
        {
            return null;
        }

        LlmStreamSlice llmResult = new(0)
        {
            message = jsonResponse.message.content
        };

        List<ToolCall> toolCallsCache = [];

        List<ToolCall> toolCallsIn = jsonResponse.message.tool_calls;
        if (toolCallsIn != null && toolCallsIn.Count > 0)
        {
            foreach (ToolCall toolCallIn in toolCallsIn)
            {
                FunctionCall? fnCallIn = toolCallIn.FunctionCall;
                if (fnCallIn == null)
                {
                    continue;
                }

                // A chunk carrying a Name starts a new tool call; argument
                // chunks are appended to the most recently started call
                // (tool calls can arrive split across chunks).
                if (!string.IsNullOrEmpty(fnCallIn.Name))
                {
                    toolCallsCache.Add(new ToolCall()
                    {
                        Type = toolCallIn.Type,
                        Id = toolCallIn.Id,
                        FunctionCall = new FunctionCall()
                        {
                            Name = fnCallIn.Name,
                            Arguments = "",
                        }
                    });
                }

                // Guarded: an arguments-only chunk before any named call would
                // previously throw on Last(); now it is dropped instead.
                if (!string.IsNullOrEmpty(fnCallIn.Arguments) && toolCallsCache.Count > 0)
                {
                    toolCallsCache.Last().FunctionCall.Arguments += fnCallIn.Arguments;
                }
            }
        }

        if (toolCallsCache.Count > 0)
        {
            foreach (ToolCall toolCall in toolCallsCache)
            {
                // BUGFIX: `new Guid()` yields the all-zero GUID for every call;
                // Guid.NewGuid() generates a unique id as intended.
                toolCall.Id ??= Guid.NewGuid().ToString();
            }
            llmResult.toolCallsOut = toolCallsCache;
        }

        return llmResult;
    }

    /// <summary>
    /// Converts simplified LLM messages into ChatMessageData instances.
    /// Tool messages are not supported and are logged and skipped.
    /// </summary>
    protected override List<ChatMessageData> MessageConversion(List<LlmMessageSimp> llmMsgsSimp)
    {
        List<ChatMessageData> returnList = [];
        foreach (var simpMsg in llmMsgsSimp)
        {
            switch (simpMsg.MessageType)
            {
                case LlmMessageSimp.LlmMessageType.system:
                    returnList.Add(ChatMessageData.FromSystem(0, 0, simpMsg.Message, [], 0, DateTime.Now));
                    break;
                case LlmMessageSimp.LlmMessageType.user:
                    returnList.Add(ChatMessageData.FromUser(0, 0, simpMsg.Message, [], [], [], 0, simpMsg.MessageType.ToString(), DateTime.Now));
                    break;
                case LlmMessageSimp.LlmMessageType.assistant:
                    returnList.Add(ChatMessageData.FromAssistant(0, 0, simpMsg.Message, [], 0, simpMsg.MessageType.ToString(), true, DateTime.Now, null));
                    break;
                case LlmMessageSimp.LlmMessageType.tool:
                    Debug.LogWarning("Not supported");
                    break;
            }
        }

        return returnList;
    }
}

#pragma warning disable IDE1006
/// <summary>
/// Deserialization target for one Ollama /api/chat response object (one JSON
/// line when streaming). Lowercase property names intentionally mirror
/// Ollama's JSON field names.
/// </summary>
public class ChatResponse
{
    // Concrete model tag that produced this response.
    public string model { get; set; } = "";

    // Server-side creation timestamp, kept as the raw string Ollama sends.
    public string created_at { get; set; } = "";

    // The (possibly partial, when streaming) assistant message payload.
    public Message message { get; set; } = new();

    // True on the terminating chunk of a streamed response.
    public bool done { get; set; }
}

/// <summary>
/// Message payload inside a <see cref="ChatResponse"/>. Lowercase property
/// names intentionally mirror Ollama's JSON field names.
/// </summary>
public class Message
{
    // Speaker role, e.g. "assistant".
    public string role { get; set; } = "";

    // Message text (a partial fragment when streaming).
    public string content { get; set; } = "";

    // Tool invocations requested by the model; empty when none.
    public List<ToolCall> tool_calls { get; set; } = [];
}
#pragma warning restore IDE1006