using Microsoft.AspNetCore.SignalR;
using MultiAgentsClient.ChatDataStructures;
using MultiAgentsClient.Shared;
using MultiAgentsServer.Scripts.Hubs;
using MultiAgentsShared;
using NPinyin;
using OpenAI;
using OpenAI.Managers;
using OpenAI.ObjectModels;
using OpenAI.ObjectModels.RequestModels;
using OpenAI.ObjectModels.ResponseModels;

namespace MultiAgentsServer.Scripts.Services;

/// <summary>
/// Instead of calling requests of this class directly, it's better to inherit from APIRequestBaseClass
/// and use its Requesting method; the base class will manage the callbacks from APIComService.
/// </summary>
public class OpenAIComService : LLmComService<ChatMessageData>
{
    /// <summary>OpenAI models this service exposes; requests are mapped through <see cref="ModelCheater"/>.</summary>
    public override List<string> ModelList => [
        Models.EnumToString(Models.Model.Gpt_4o_mini_2024_07_18),
        Models.EnumToString(Models.Model.Gpt_4o_2024_05_13),
        Models.EnumToString(Models.Model.Gpt_4_turbo_2024_04_09),
    ];

    // Maximum time a single request may hold the concurrency semaphore before the
    // watchdog force-releases it so a hung request cannot block the service forever.
    private static readonly TimeSpan RequestTimeout = TimeSpan.FromSeconds(120); // 2 min

    // How long callers wait for a free request slot before being turned away.
    private const int SemaphoreWaitMs = 5000; // 5 s

    // NOTE(review): stays unassigned (null) when no API key is found in the ctor, so any
    // later request would throw a NullReferenceException — confirm that is intended.
    private OpenAIService OpenAIService { get; set; }

    public OpenAIComService(IEventBus eventBusIn, IHubContext<SignalRWorkflowHub> hubContext, IConfiguration configuration) : base(eventBusIn, hubContext, configuration)
    {
        // Pick the API key for the current build flavor. The #else fallback fixes a
        // compile error (use of unassigned local) when none of the symbols is defined.
        string? apiKey;
#if DEVELOP
        apiKey = Environment.GetEnvironmentVariable("OPENAI_KEY_DEVELOP");
#elif RELEASE
        apiKey = Environment.GetEnvironmentVariable("OPENAI_KEY");
#elif LOCALDEV
        apiKey = Environment.GetEnvironmentVariable("OPENAI_KEY_LocalDev");
#else
        apiKey = Environment.GetEnvironmentVariable("OPENAI_KEY");
#endif

        // Optional override of the OpenAI base domain (e.g. a proxy) from appsettings.
        var appSettingSpecifiedBaseDomain = configuration.GetValue<string>("ApiBaseDomain");

        if (string.IsNullOrEmpty(apiKey)) // also rejects an empty env var, not just a missing one
        {
            Debug.LogError("OPENAI_KEY not found.");
            return;
        }

        OpenAIService = new OpenAIService(new OpenAiOptions()
        {
            ApiKey = apiKey
        }, appSettingSpecifiedBaseDomain);
    }

    /// <summary>
    /// Maps the requested model name to the model actually used. Currently a pass-through,
    /// kept as a single place to pin or downgrade models for a deployment.
    /// </summary>
    protected override string ModelCheater(string requestedModel)
    {
        string usingModel = requestedModel;

        Debug.LogLine($"RequestedModel:{requestedModel}, UsingModel:{usingModel}\r");
        return usingModel;
    }

    /// <summary>Best-effort MIME type from a file extension; unknown extensions fall back to octet-stream.</summary>
    private static string GetMimeType(string fileName)
    {
        return Path.GetExtension(fileName).ToLowerInvariant() switch
        {
            ".jpg" or ".jpeg" => "image/jpeg",
            ".png" => "image/png",
            ".gif" => "image/gif",
            ".bmp" => "image/bmp",
            _ => "application/octet-stream",
        };
    }

    /// <summary>
    /// Converts the project's <see cref="ChatMessageData"/> messages into the SDK's
    /// <see cref="ChatMessage"/> shape: text content first, then attached images as binary
    /// content, then text files inlined as additional text parts.
    /// </summary>
    private static List<ChatMessage> ConvertMessageFormat(List<ChatMessageData> chatMessageDatas)
    {
        List<ChatMessage> messages = [];

        foreach (ChatMessageData message in chatMessageDatas)
        {
            List<MessageContent> contents = [];

            contents.Add(MessageContent.TextContent(message.content));

            foreach (KeyValuePair<int, (string fileName, string fileContent)> image in message.images)
            {
                // Images arrive base64-encoded; the SDK wants raw bytes plus a MIME type.
                byte[] imageBytes = Convert.FromBase64String(image.Value.fileContent);
                string mimeType = GetMimeType(image.Value.fileName);

                contents.Add(MessageContent.ImageBinaryContent(imageBytes, mimeType));
            }

            foreach (KeyValuePair<int, (string fileName, string fileContent)> file in message.textFiles)
            {
                contents.Add(MessageContent.TextContent("File name: " + file.Value.fileName + "\r" + "File content: " + file.Value.fileContent));
            }

            // Romanize the author via pinyin — presumably because the OpenAI "name" field
            // rejects non-ASCII characters; TODO confirm against the API contract.
            string? author = Pinyin.GetPinyin(message.author).Replace(" ", "");
            if (string.IsNullOrEmpty(author))
            {
                author = null; // omit the name field entirely rather than send an empty string
            }

            messages.Add(new ChatMessage(message.role, contents, author, message.toolCalls, message.toolCallId));
        }
        return messages;
    }

    /// <summary>
    /// Folds one streamed tool-call delta into the accumulated list. The stream only carries
    /// a function name on the first delta of a new call; subsequent deltas carry argument
    /// fragments that belong to the most recently started call.
    /// </summary>
    private static void AccumulateToolCalls(List<ToolCall> cache, IEnumerable<ToolCall> incoming)
    {
        foreach (var toolCallIn in incoming)
        {
            var fnCallIn = toolCallIn.FunctionCall;
            if (fnCallIn == null)
            {
                continue;
            }

            if (!string.IsNullOrEmpty(fnCallIn.Name))
            {
                // A named delta starts a new tool call; arguments stream in afterwards.
                cache.Add(new ToolCall()
                {
                    Type = toolCallIn.Type,
                    Id = toolCallIn.Id,
                    FunctionCall = new FunctionCall()
                    {
                        Name = fnCallIn.Name,
                        Arguments = "",
                    }
                });
            }

            if (!string.IsNullOrEmpty(fnCallIn.Arguments))
            {
                if (cache.Count > 0)
                {
                    // Always append arguments to the last (= current) tool call.
                    cache[^1].FunctionCall.Arguments += fnCallIn.Arguments;
                }
                else
                {
                    // Guard: the original crashed (Last() on empty list) if an argument
                    // fragment ever arrived before any named tool call.
                    Debug.LogWarning("Tool-call arguments received before any tool-call name; fragment dropped.");
                }
            }
        }
    }

    /// <summary>
    /// Streams a chat completion: yields one <see cref="LlmStreamSlice"/> per received chunk,
    /// then (if any) a slice carrying the accumulated tool calls, then a terminating null.
    /// Concurrency is bounded by <see cref="Semaphore"/>; the slot is released exactly once,
    /// either by the normal path or by the timeout watchdog.
    /// </summary>
    public override async IAsyncEnumerable<LlmStreamSlice?> RequestChatAsStream(
        List<ChatMessageData> messages,
        string requestedModel,
        float temperature,
        List<ToolDefinition>? toolSet = null)
    {
        var usingModel = ModelCheater(requestedModel);

        if (!await Semaphore.WaitAsync(SemaphoreWaitMs))
        {
            Debug.LogError("Timeout occurred while waiting for the semaphore.");

            yield return new LlmStreamSlice(0)
            {
                message = "The number of concurrent connections reached the upper limit. Please try again later.",
            };
            yield return null;
            yield break;
        }

        CancellationTokenSource cts = new();

        // Release exactly once whether the normal path or the watchdog gets there first.
        // The original could double-release the semaphore when the timeout raced completion.
        int released = 0;
        void ReleaseOnce()
        {
            if (Interlocked.Exchange(ref released, 1) == 0)
            {
                Semaphore.Release();
            }
        }

        _ = Task.Delay(RequestTimeout, cts.Token).ContinueWith(t =>
        {
            if (!t.IsCanceled)
            {
                ReleaseOnce();
                Console.WriteLine("Semaphore released automatically after timeout.");
            }
        });

        IAsyncEnumerable<ChatCompletionCreateResponse>? completionResult = null;
        try
        {
            // NOTE(review): this call only builds the async stream; SDK/network errors surface
            // during enumeration below (outside this try) and would propagate to the caller.
            completionResult = OpenAIService.ChatCompletion.CreateCompletionAsStream(new ChatCompletionCreateRequest()
            {
                Messages = ConvertMessageFormat(messages),
                Temperature = temperature,
                Tools = toolSet,
                Model = usingModel,
            });
        }
        catch (Exception e)
        {
            Debug.LogError($"{e.Message}");
        }

        if (completionResult == null)
        {
            ReleaseOnce();
            cts.Cancel();
            yield return null;
            yield break;
        }

        List<ToolCall>? toolCallsCache = null;
        int sliceCounter = 0;
        await foreach (var completion in completionResult)
        {
            if (!completion.Successful)
            {
                Debug.LogError(completion.Error == null
                    ? "Unknown Error"
                    : $"{completion.Error.Code}: {completion.Error.Message}");
                continue;
            }

            var outputSlice = new LlmStreamSlice(sliceCounter++);

            var firstChatMsgIncome = completion.Choices.FirstOrDefault()?.Message;
            outputSlice.message = firstChatMsgIncome?.Content;

            if (outputSlice.message == null)
            {
                Debug.Log("(NULL MSG)", ConsoleColor.Yellow, false);
            }

            var toolCallsIn = firstChatMsgIncome?.ToolCalls;
            if (toolCallsIn != null && toolCallsIn.Count > 0)
            {
                toolCallsCache ??= [];
                AccumulateToolCalls(toolCallsCache, toolCallsIn);
            }

            yield return outputSlice;
        }

        // Tool calls stream in fragments, so they are only complete once the stream ends;
        // emit them as one final dedicated slice.
        if (toolCallsCache != null && toolCallsCache.Count > 0)
        {
            LlmStreamSlice outputSliceToolCalls = new(sliceCounter++)
            {
                toolCallsOut = toolCallsCache
            };

            Debug.Log($"ToolCalls: {toolCallsCache.Count}. ", ConsoleColor.DarkCyan, false);
            yield return outputSliceToolCalls;
        }

        Debug.LogLine("(END)", ConsoleColor.Red, false);

        // Release no matter success or failure, then cancel the watchdog delay.
        ReleaseOnce();
        cts.Cancel();

        yield return null;
    }

    /// <summary>
    /// Converts simplified LLM messages into <see cref="ChatMessageData"/> for this backend.
    /// Tool messages are not supported and are dropped with a warning.
    /// </summary>
    protected override List<ChatMessageData> MessageConversion(List<LlmMessageSimp> llmMsgsSimp)
    {
        List<ChatMessageData> returnList = [];
        foreach (var simpMsg in llmMsgsSimp)
        {
            switch (simpMsg.MessageType)
            {
                case LlmMessageSimp.LlmMessageType.system:
                    returnList.Add(ChatMessageData.FromSystem(0, 0, simpMsg.Message, [], 0, DateTime.Now));
                    break;
                case LlmMessageSimp.LlmMessageType.user:
                    returnList.Add(ChatMessageData.FromUser(0, 0, simpMsg.Message, [], [], [], 0, simpMsg.MessageType.ToString(), DateTime.Now));
                    break;
                case LlmMessageSimp.LlmMessageType.assistant:
                    returnList.Add(ChatMessageData.FromAssistant(0, 0, simpMsg.Message, [], 0, simpMsg.MessageType.ToString(), true, DateTime.Now, null));
                    break;
                case LlmMessageSimp.LlmMessageType.tool:
                    Debug.LogWarning("Not supported");
                    break;
            }
        }

        return returnList;
    }

    /// <summary>
    /// Performs a single (non-streaming) chat completion. Returns a slice with the message
    /// and any tool calls, null on failure, or a limit-reached slice when no request slot
    /// becomes free within <see cref="SemaphoreWaitMs"/>.
    /// </summary>
    public override async Task<LlmStreamSlice?> RequestChat(
        List<ChatMessageData> messages,
        string requestedModel,
        float temperature,
        List<ToolDefinition>? toolSet = null)
    {
        var usingModel = ModelCheater(requestedModel);

        if (!await Semaphore.WaitAsync(SemaphoreWaitMs))
        {
            Debug.LogError("Timeout occurred while waiting for the semaphore.");

            return new LlmStreamSlice(0)
            {
                message = "The number of concurrent connections reached the upper limit. Please try again later.",
            };
        }

        CancellationTokenSource cts = new();

        // Release exactly once whether the normal path or the watchdog gets there first
        // (the original could double-release when the timeout raced completion).
        int released = 0;
        void ReleaseOnce()
        {
            if (Interlocked.Exchange(ref released, 1) == 0)
            {
                Semaphore.Release();
            }
        }

        _ = Task.Delay(RequestTimeout, cts.Token).ContinueWith(t =>
        {
            if (!t.IsCanceled)
            {
                ReleaseOnce();
                Console.WriteLine("Semaphore released automatically after timeout.");
            }
        });

        try
        {
            ChatCompletionCreateResponse? completionResult = null;
            try
            {
                completionResult = await OpenAIService.ChatCompletion.CreateCompletion(new ChatCompletionCreateRequest
                {
                    Messages = ConvertMessageFormat(messages),
                    Temperature = temperature,
                    Tools = toolSet,
                    Model = usingModel,
                });
            }
            catch (Exception e)
            {
                Debug.LogError($"{e.Message}");
            }

            if (completionResult == null)
            {
                return null;
            }

            if (!completionResult.Successful)
            {
                Debug.LogError(completionResult.Error == null
                    ? "Unknown Error"
                    : $"{completionResult.Error.Code}: {completionResult.Error.Message}");
                return null;
            }

            // Guard: First() on an empty choices list would have thrown (and leaked the
            // semaphore slot in the original, which had no finally).
            var choice = completionResult.Choices.FirstOrDefault();
            if (choice == null)
            {
                Debug.LogError("Response contained no choices.");
                return null;
            }

            LlmStreamSlice llmResult = new(0)
            {
                message = choice.Message.Content
            };
            if (choice.Message.ToolCalls != null && choice.Message.ToolCalls.Count > 0)
            {
                // ToList() instead of the original hard cast, which could throw
                // InvalidCastException if the SDK returns a non-List IList.
                llmResult.toolCallsOut = choice.Message.ToolCalls.ToList();
            }

            return llmResult;
        }
        finally
        {
            // Release no matter success or failure, then cancel the watchdog delay.
            ReleaseOnce();
            cts.Cancel();
        }
    }


    ///Json Mode:
    //ResponseFormat
}
