﻿using StableDiffusionTools.Oobabooga;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;

namespace LLMEvaluation.Framework
{
    /// <summary>
    /// Executes a batch of LLM evaluation runs against an OpenAI-compatible
    /// text-generation web UI, loading models on the server as required and
    /// streaming chat-completion tokens back to the caller via events.
    /// </summary>
    public class GenerationJob
    {
        // One shared HttpClient for all jobs — creating a client per request leaks
        // sockets. HttpClient's default 100-second Timeout can abort long streaming
        // reads (on .NET Core it also covers content-stream reading), so it is
        // disabled here; the per-request 10-minute CancellationTokenSource in
        // ProcessJob is the effective cap instead.
        private static readonly HttpClient streamingClient = new HttpClient { Timeout = Timeout.InfiniteTimeSpan };

        // Shared serializer options: enums are serialized lowercase to match the
        // web UI's OpenAI-compatible API schema.
        private static readonly JsonSerializerOptions options = new JsonSerializerOptions { Converters = { new JsonStringEnumConverter(new JsonNamingPolicyLowercase()) } };

        /// <summary>Raised for every streamed token/update during generation.</summary>
        public event EventHandler<GenerationUpdateEventArgs> GenerationUpdate;
        private void OnGenerationUpdate(GenerationUpdateEventArgs updated)
        {
            RaiseOnUIThread(GenerationUpdate, updated);
        }

        /// <summary>Raised when the server starts loading a different model.</summary>
        public event EventHandler<GenerationLoadingModelEventArgs> GenerationLoadingModel;
        private void OnGenerationLoadingModel(GenerationLoadingModelEventArgs loading)
        {
            RaiseOnUIThread(GenerationLoadingModel, loading);
        }

        /// <summary>Raised when the server has finished loading a model.</summary>
        public event EventHandler<GenerationLoadedModelEventArgs> GenerationLoadedModel;
        private void OnGenerationLoadedModel(GenerationLoadedModelEventArgs loaded)
        {
            RaiseOnUIThread(GenerationLoadedModel, loaded);
        }

        // Marshals an event onto the synchronization context captured at
        // construction time (the UI thread, when one exists); otherwise raises
        // it inline on the worker thread.
        private void RaiseOnUIThread<T>(EventHandler<T> handler, T args)
        {
            if (handler != null)
            {
                if (context == null)
                {
                    handler(this, args);
                }
                else
                {
                    context.Post(delegate { handler(this, args); }, null);
                }
            }
        }

        /// <summary>The runs this job will execute.</summary>
        public List<GenerationRun> Runs { get; set; } = new List<GenerationRun>();
        /// <summary>Host name or IP address of the text-generation web UI.</summary>
        public string WebUIServer { get; set; } = "127.0.0.1";
        /// <summary>API port of the web UI.</summary>
        public int WebUIPort { get; set; } = 5000;
        /// <summary>Bearer token sent with every API request (may be empty).</summary>
        public string WebUIOAuthToken { get; set; } = "";
        // Sampling parameters; null means "use the server default".
        public float? Temperature { get; set; } = null;
        public float? FrequencyPenalty { get; set; } = null;
        public float? PresencePenalty { get; set; } = null;
        public float? TopP { get; set; } = null;
        public int? TopK { get; set; } = null;
        public int? MaxTokens { get; set; } = null;
        public int? MaxTokensSecond { get; set; } = null;
        /// <summary>True when at least one run is configured.</summary>
        public bool HasRuns { get => Runs.Count > 0; }
        /// <summary>Number of failed "current model" queries.</summary>
        public int GetModelInfoFailure { get; set; } = 0;
        /// <summary>Number of failed model-load requests.</summary>
        public int LoadModelFailure { get; set; } = 0;
        /// <summary>Consume-once flag: skip the remaining generations for the current model.</summary>
        public bool SkipModel { get; set; } = false;
        /// <summary>Consume-once flag: skip the current run entirely.</summary>
        public bool SkipRun { get; set; } = false;
        /// <summary>True when <see cref="ProcessJob"/> ended because <see cref="Cancel"/> was called.</summary>
        public bool JobCancellated { get; private set; }

        // Accumulated assistant messages per run and per model (one entry per repeat index k).
        private Dictionary<GenerationRun, Dictionary<ModelEntry, List<Message>>> generationRunsModelMessages = new Dictionary<GenerationRun, Dictionary<ModelEntry, List<Message>>>();
        // Captured at construction so events can be posted back to the UI thread.
        private SynchronizationContext context = SynchronizationContext.Current;
        private CancellationTokenSource abortSource = new CancellationTokenSource();

        /// <summary>
        /// Creates a job targeting the given server. Null sampling parameters mean
        /// "use the server default".
        /// </summary>
        public GenerationJob(string server, int port, string token, float? temp, float? freq, float? presence, float? topp, int? topk, int? maxToken, int? maxTokenSec)
        {
            WebUIServer = server;
            WebUIPort = port;
            WebUIOAuthToken = token;
            Temperature = temp;
            FrequencyPenalty = freq;
            PresencePenalty = presence;
            TopP = topp;
            TopK = topk;
            MaxTokens = maxToken;
            MaxTokensSecond = maxTokenSec;
        }

        /// <summary>
        /// Executes every scheduled run/model combination, loading models on the
        /// server as needed and streaming completions. Returns when the work queue
        /// is exhausted or <see cref="Cancel"/> is called. Call at most once per
        /// instance: the cancellation source is disposed on completion.
        /// </summary>
        /// <param name="order">Whether work is grouped by run or by model name.</param>
        public async Task ProcessJob(InferenceOrder order)
        {
            await Task.Run(async () =>
            {
                int total = 0;      // total number of generations, for progress reporting
                int current = 0;    // generations started so far

                // Populate the per-run / per-model message state.
                foreach (var scheduledRun in Runs)
                {
                    var perModel = new Dictionary<ModelEntry, List<Message>>();
                    foreach (var modelEntry in scheduledRun.Models)
                        perModel[modelEntry] = new List<Message>();
                    generationRunsModelMessages[scheduledRun] = perModel;

                    total += scheduledRun.Models.Count * scheduledRun.Runs;
                }

                Queue<GenerationWork> generations = BuildWorkQueue(order);

                // Process the queue.
                List<GenerationRun> skipRuns = new List<GenerationRun>();
                bool skippedRun = false;
                while (generations.Count > 0)
                {
                    GenerationWork work = generations.Dequeue();
                    var run = work.Run;
                    var model = run.Models[work.ModelIndex];

                    if (skipRuns.Contains(run))
                        continue;

                    if (abortSource.IsCancellationRequested)
                        break;

                    run.Run.Inference = InferenceState.Generating;
                    model.Inference = InferenceState.Generating;

                    // Ensure the requested model is the one loaded on the server.
                    var modelInfo = await WebUIAPI.GetModelInfoInternal();
                    bool shouldLoadModel = false;
                    bool success = modelInfo.Success;
                    if (success)
                    {
                        if (abortSource.IsCancellationRequested)
                            break;

                        shouldLoadModel = !modelInfo.Data.ModelName.Equals(model.Name, StringComparison.InvariantCultureIgnoreCase);
                        if (shouldLoadModel)
                        {
                            OnGenerationLoadingModel(new GenerationLoadingModelEventArgs(model.Name));
                            var loadModel = await WebUIAPI.LoadModelInternal(model.Name);
                            success = loadModel.Success;
                            if (!success)
                            {
                                LoadModelFailure++;
                                // Emit an empty assistant message so the UI still shows the failed slot.
                                OnGenerationUpdate(new GenerationUpdateEventArgs(model, run.Run, new List<Message>(new Message[] { new Message(Role.Assistant, "") }), GenerationUpdateState.First, total, current));
                            }
                        }
                    }
                    else
                    {
                        GetModelInfoFailure++;
                    }

                    if (success)
                    {
                        if (shouldLoadModel)
                            OnGenerationLoadedModel(new GenerationLoadedModelEventArgs(model.Name));

                        bool skippingModel = false;
                        for (int k = 0; k < run.Runs; k++)
                        {
                            current += 1;

                            if (ShouldSkipModel())
                                skippingModel = true;

                            if (abortSource.IsCancellationRequested || skippingModel)
                                break;

                            // seed: null lets the server pick; a fixed seed is offset per
                            // repeat so each repeat differs deterministically.
                            var chatCompletionRequest = new ChatCompletion()
                            {
                                messages = run.PromptMessages,
                                temperature = Temperature,
                                max_tokens = MaxTokens ?? -1,
                                presence_penalty = PresencePenalty,
                                frequency_penalty = FrequencyPenalty,
                                top_p = TopP,
                                top_k = TopK,
                                n = 1,
                                max_tokens_second = MaxTokensSecond,
                                stream = true,
                                seed = (run.Seed == -1) ? null : (int?)(run.Seed + k)
                            };

                            string chatRequestJSON = JsonSerializer.Serialize(chatCompletionRequest, options);
                            try
                            {
                                using (var requestMessage = new HttpRequestMessage(HttpMethod.Post, FormatAPIUrl("chat/completions")))
                                {
                                    requestMessage.Headers.Authorization = new AuthenticationHeaderValue("Bearer", WebUIOAuthToken);
                                    requestMessage.Content = new StringContent(chatRequestJSON, Encoding.UTF8, "application/json");

                                    // Cap a single generation at 10 minutes and tie it to job cancellation.
                                    using (var cts = new CancellationTokenSource(TimeSpan.FromMinutes(10)))
                                    using (CancellationTokenSource linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cts.Token, abortSource.Token))
                                    using (var response = await streamingClient.SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, linkedCts.Token))
                                    {
                                        if (response.StatusCode == HttpStatusCode.OK)
                                        {
                                            using (var contentStream = await response.Content.ReadAsStreamAsync())
                                            using (var streamReader = new StreamReader(contentStream))
                                            {
                                                var messages = generationRunsModelMessages[run][model];

                                                while (!streamReader.EndOfStream && !linkedCts.Token.IsCancellationRequested)
                                                {
                                                    if (ShouldSkipRun())
                                                    {
                                                        // Skipping the run also skips this model's remaining
                                                        // repeats; the run itself is unscheduled after the loop.
                                                        SkipModel = true;
                                                        skippedRun = true;
                                                    }

                                                    if (ShouldSkipModel())
                                                    {
                                                        skippingModel = true;
                                                        break;
                                                    }

                                                    string line = await streamReader.ReadLineAsync();

                                                    // Only server-sent-event "data:" lines carry chunks; a null
                                                    // line (end of stream), blank separators and anything else
                                                    // are ignored.
                                                    if (line == null || !line.StartsWith("data: ", StringComparison.Ordinal))
                                                        continue;

                                                    GenerationUpdateState currentState = GenerationUpdateState.First;
                                                    try
                                                    {
                                                        string json = line.Substring("data: ".Length);
                                                        ChatCompletionResult chunk = JsonSerializer.Deserialize<ChatCompletionResult>(json, options);
                                                        Message delta = chunk.Choices[0].Delta;
                                                        if (messages.Count <= k)
                                                        {
                                                            // First chunk of this repeat: start a new message.
                                                            messages.Add(new Message(delta.Role, delta.Content));
                                                        }
                                                        else
                                                        {
                                                            messages[k].Content += delta.Content;
                                                            // Usage statistics are only present on the final chunk.
                                                            currentState = chunk.Usage == null ? GenerationUpdateState.Middle : GenerationUpdateState.Last;
                                                        }

                                                        OnGenerationUpdate(new GenerationUpdateEventArgs(model, run.Run, messages, currentState, total, current));
                                                    }
                                                    catch (Exception)
                                                    {
                                                        // Best effort: malformed chunks (including the trailing
                                                        // "data: [DONE]" sentinel) are dropped silently.
                                                    }
                                                }
                                            }
                                        }
                                        // Non-OK responses leave this generation empty; the queue
                                        // continues with the next work item.
                                    }
                                }
                            }
                            catch (OperationCanceledException)
                            {
                                // Job cancelled or the per-generation timeout fired; the
                                // loop conditions above handle the exit.
                            }
                            catch (Exception)
                            {
                                // Best effort: a failed request for one generation must not
                                // abort the remaining queued work.
                            }
                        }
                    }

                    model.Inference = InferenceState.Idle;

                    if ((ShouldSkipRun() || skippedRun) && !skipRuns.Contains(run))
                    {
                        skipRuns.Add(run);
                        run.Unschedule();
                        skippedRun = false;
                    }
                    else
                    {
                        run.Run.Inference = run.AllModelsIdle ? InferenceState.Idle : InferenceState.Scheduled;
                    }
                }
            });

            JobCancellated = abortSource.IsCancellationRequested;
            abortSource.Dispose();
            abortSource = null;
        }

        // Builds the work queue either grouped by run, or grouped by model name so
        // that each model only needs to be loaded on the server once.
        private Queue<GenerationWork> BuildWorkQueue(InferenceOrder order)
        {
            var generations = new Queue<GenerationWork>();
            if (order == InferenceOrder.Run)
            {
                foreach (var run in Runs)
                {
                    for (int j = 0; j < run.Models.Count; j++)
                        generations.Enqueue(new GenerationWork(run, j));
                }
            }
            else
            {
                List<string> processModels = GenerationRun.GetUniqueModelNames(Runs);
                foreach (string modelName in processModels)
                {
                    foreach (var run in Runs)
                    {
                        for (int j = 0; j < run.Models.Count; j++)
                        {
                            if (modelName.Equals(run.Models[j].Name, StringComparison.InvariantCultureIgnoreCase))
                                generations.Enqueue(new GenerationWork(run, j));
                        }
                    }
                }
            }
            return generations;
        }

        // Consume-once accessor for SkipModel: returns true at most once per request.
        private bool ShouldSkipModel()
        {
            if (!SkipModel)
                return false;

            SkipModel = false;
            return true;
        }

        // Consume-once accessor for SkipRun: returns true at most once per request.
        private bool ShouldSkipRun()
        {
            if (!SkipRun)
                return false;

            SkipRun = false;
            return true;
        }

        // Builds the OpenAI-compatible v1 endpoint URL for the configured server.
        private string FormatAPIUrl(string apiName)
        {
            return $"http://{WebUIServer}:{WebUIPort}/v1/{apiName}";
        }

        /// <summary>Marks every run in the job as scheduled.</summary>
        public void Schedule() => SetInferenceState(InferenceState.Scheduled);
        /// <summary>Marks every run in the job as idle.</summary>
        public void Unschedule() => SetInferenceState(InferenceState.Idle);

        /// <summary>Applies the given inference state to every run in the job.</summary>
        public void SetInferenceState(InferenceState state)
        {
            for (int i = 0; i < Runs.Count; i++)
                Runs[i].SetInferenceState(state);
        }

        /// <summary>Requests cancellation of a running <see cref="ProcessJob"/>; safe to call when the job has already finished.</summary>
        public void Cancel()
        {
            abortSource?.Cancel();
        }
    }
}
