using System;
using System.Collections;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;
using UnityEditor;
using UnityEngine;
using static LLMResponseDefine;

/// <summary>
/// Static gateway to the active LLM backend. Call <see cref="Init"/> once
/// (e.g. from a bootstrap MonoBehaviour) before using <see cref="Generate"/>
/// or <see cref="ModelSetting"/>.
/// </summary>
public class VLLMGate : MonoBehaviour
{
    // NOTE(review): never assigned in this file — presumably set elsewhere
    // (or intended to be `instance = this` in Awake). TODO confirm.
    public static VLLMGate instance;

    // Active model backend; remains null until Init() runs.
    public static VLLM_ModelBase model;

    /// <summary>Creates and starts the default cloud model backend.</summary>
    public void Init()
    {
        model = new VLLM_QwenTurbo_Cloud();
        model.StartServe();
        print("LLMModelInit");
    }

    /// <summary>
    /// Sends <paramref name="prompt"/> to the active model and returns the response text.
    /// </summary>
    /// <param name="prompt">Prompt text to send.</param>
    /// <returns>The model's response text.</returns>
    /// <exception cref="InvalidOperationException">Called before <see cref="Init"/>.</exception>
    public static async Task<string> Generate(string prompt)
    {
        if (model == null)
            throw new InvalidOperationException("VLLMGate.Generate called before Init(); no model is available.");
        return await model.Generate(prompt);
    }

    /// <summary>Configures response format and console logging on the active model.</summary>
    /// <param name="jsonFormat">Request JSON-formatted responses when true.</param>
    /// <param name="responseConsole">Log prompts/responses to the console when true.</param>
    /// <exception cref="InvalidOperationException">Called before <see cref="Init"/>.</exception>
    public static void ModelSetting(bool jsonFormat = false, bool responseConsole = true)
    {
        if (model == null)
            throw new InvalidOperationException("VLLMGate.ModelSetting called before Init(); no model is available.");
        model.jsonFormat = jsonFormat;
        model.responseConsole = responseConsole;
    }

    // Deprecated InteractByString/InteractByJson helpers removed: all
    // interaction should go through interaction-template instances
    // (LLMInteractPairsBase) instead of this gateway.
}

/// <summary>
/// Base class for LLM backends. Subclasses override the endpoints,
/// credentials, and the <see cref="Generate"/>/<see cref="Chat"/>/<see cref="StartServe"/>
/// lifecycle methods.
/// </summary>
public class VLLM_ModelBase
{
    /// <summary>Model identifier sent to the backend (e.g. "qwen-turbo").</summary>
    public virtual string modelName { get; }
    /// <summary>Generate endpoint; defaults to a local Ollama-style server.</summary>
    public virtual string url_gen => "http://localhost:11434/api/generate";
    /// <summary>Chat endpoint; defaults to a local Ollama-style server.</summary>
    public virtual string url_chat => "http://localhost:11434/api/chat";
    /// <summary>Cloud service URL; empty for local backends.</summary>
    public virtual string serverUrl => "";
    /// <summary>API key for cloud backends; empty for local backends.</summary>
    public virtual string APIKEY => "";
    /// <summary>Last parsed response, populated by subclasses after each call.</summary>
    public LLMResponseBase lastResponse;
    /// <summary>Request JSON-formatted output when true.</summary>
    public bool jsonFormat = false;
    /// <summary>Log prompts/responses to the console when true.</summary>
    public bool responseConsole = true;

    // Base implementations are no-ops that complete synchronously with null;
    // Task.FromResult avoids the pointless async state machine of
    // `await Task.Delay(0)`.
    public virtual Task<string> Generate(string prompt) => Task.FromResult<string>(null);
    public virtual Task<string> Chat() => Task.FromResult<string>(null);
    public virtual void StartServe() { }
}

/// <summary>
/// Minimal response envelope shared by every model backend: the extracted
/// assistant message text plus the raw response body it came from.
/// </summary>
public class LLMResponseBase
{
    /// <summary>Extracted assistant message text.</summary>
    public string content;

    /// <summary>Raw response body as returned by the HTTP call.</summary>
    public string response;
}

/// <summary>
/// Qwen-Turbo backend served via Alibaba DashScope's cloud HTTP API.
/// Call <see cref="StartServe"/> before <see cref="Generate"/>.
/// </summary>
public class VLLM_QwenTurbo_Cloud : VLLM_ModelBase
{
    public override string modelName => "qwen-turbo";
    public override string serverUrl => "https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation";
    // SECURITY: hard-coded API key committed to source. Rotate this key and
    // load it from an environment variable or untracked config file instead.
    public override string APIKEY => "sk-aca7aeece706494480a53a63100ae1a9";

    /// <summary>
    /// Sends <paramref name="prompt"/> as a single-turn chat request and
    /// returns the first choice's message content.
    /// </summary>
    /// <param name="prompt">User prompt text.</param>
    /// <returns>The assistant message text of the first choice.</returns>
    /// <exception cref="InvalidOperationException"><see cref="StartServe"/> was not called first.</exception>
    /// <exception cref="Exception">The HTTP request failed or the response contained no choices.</exception>
    public override async Task<string> Generate(string prompt)
    {
        if (client == null)
            throw new InvalidOperationException("StartServe() must be called before Generate().");

        if (responseConsole) Debug.Log(prompt);

        // DashScope "generation" request body. response_format switches the
        // model between plain text and JSON-object output.
        var jsonData = new
        {
            model = "qwen-turbo",
            input = new
            {
                messages = new[]
                    {
                        // TODO: the prompt weighting here could be tuned.
                        new { role = "system", content = "You are a helpful assistant." },
                        new { role = "user", content = prompt }
                    }
            },
            parameters = new
            {
                result_format = "message",
                response_format = new
                {
                    type = jsonFormat ? "json_object" : "text"
                }
            }
        };
        var jsonString = JsonConvert.SerializeObject(jsonData);
        var content = new StringContent(jsonString, Encoding.UTF8, "application/json");
        try
        {
            var response = await client.PostAsync(serverUrl, content);
            response.EnsureSuccessStatusCode();
            var responseBody = await response.Content.ReadAsStringAsync();
            var jsonObject = JsonConvert.DeserializeObject<LLMResponse_QwenTurbo>(responseBody);

            // Guard against a well-formed but empty/unexpected payload instead
            // of crashing with an index/null-reference exception.
            if (jsonObject?.Output?.Choices == null || jsonObject.Output.Choices.Count == 0)
                throw new Exception("Unexpected DashScope response (no choices): " + responseBody);

            jsonObject.content = jsonObject.Output.Choices[0].Message.Content;
            jsonObject.response = responseBody;
            lastResponse = jsonObject;
            if (responseConsole)
            {
                string console = $"body:{jsonObject.content}\n{jsonObject.response}";
                Debug.Log(console);
            }

            return jsonObject.content;
        }
        catch (HttpRequestException e)
        {
            // Preserve the original exception as InnerException so the stack
            // trace and status details are not lost.
            throw new Exception($"Request error: {e.Message}", e);
        }
    }

    // Shared client for all requests; created once in StartServe().
    public HttpClient client;

    /// <summary>Creates the shared HttpClient and attaches the Bearer token.</summary>
    public override void StartServe()
    {
        client = new HttpClient();
        client.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", APIKEY);
    }

    #region  LLMResponse_QwenTurbo
    /// <summary>Typed mapping of the DashScope text-generation response JSON.</summary>
    public class LLMResponse_QwenTurbo : LLMResponseBase
    {
        [JsonProperty("output")]
        public OutputData Output { get; set; }

        [JsonProperty("usage")]
        public UsageData Usage { get; set; }

        [JsonProperty("request_id")]
        public string RequestId { get; set; }

        public class OutputData
        {
            [JsonProperty("choices")]
            public List<Choice> Choices { get; set; }

            public class Choice
            {
                [JsonProperty("finish_reason")]
                public string FinishReason { get; set; }

                [JsonProperty("message")]
                public MessageContent Message { get; set; }

                public class MessageContent
                {
                    [JsonProperty("role")]
                    public string Role { get; set; }

                    [JsonProperty("content")]
                    public string Content { get; set; }
                }
            }
        }

        public class UsageData
        {
            [JsonProperty("prompt_tokens_details")]
            public PromptTokens PromptTokensDetails { get; set; }

            [JsonProperty("total_tokens")]
            public int TotalTokens { get; set; }

            [JsonProperty("output_tokens")]
            public int OutputTokens { get; set; }

            [JsonProperty("input_tokens")]
            public int InputTokens { get; set; }

            public class PromptTokens
            {
                [JsonProperty("cached_tokens")]
                public int CachedTokens { get; set; }
            }
        }
    }
    #endregion
}

/// <summary>
/// Qwen 2.5 backend served by a local Ollama instance. The endpoints and
/// empty credentials are inherited from <see cref="VLLM_ModelBase"/>
/// (the previous identical overrides were redundant and have been removed).
/// </summary>
public class VLLM_Qwen25_Local : VLLM_ModelBase
{
    public override string modelName => "qwen2.5";

    // Shared client for all requests; created once in StartServe().
    public HttpClient client;

    /// <summary>Creates the shared HttpClient (the local server needs no auth header).</summary>
    public override void StartServe()
    {
        client = new();
    }

    /// <summary>
    /// Generates text for <paramref name="prompt"/>, honoring the base-class
    /// <see cref="VLLM_ModelBase.jsonFormat"/> flag (previously it was ignored,
    /// inconsistent with the cloud backend).
    /// </summary>
    public override async Task<string> Generate(string prompt)
    {
        return await _Generate(prompt, jsonFormat);
    }

    /// <summary>
    /// Low-level call against the Ollama /api/generate endpoint.
    /// </summary>
    /// <param name="prompt">Prompt text.</param>
    /// <param name="isJson">Request JSON-formatted output.</param>
    /// <param name="timeTag">Append the generation duration to the returned text.</param>
    /// <returns>The model's message text, or null when the request fails (best-effort).</returns>
    /// <exception cref="InvalidOperationException"><see cref="StartServe"/> was not called first.</exception>
    public async Task<string> _Generate(string prompt, bool isJson = false, bool timeTag = false)
    {
        if (client == null)
            throw new InvalidOperationException("StartServe() must be called before _Generate().");

        var curGenReq = new GenerateRequest(modelName, prompt, isJson, false);
        var reqJson = curGenReq.ToJson();
        // Declare the JSON media type explicitly — the endpoint expects
        // application/json (matches the cloud backend's request).
        var curContent = new StringContent(reqJson, System.Text.Encoding.UTF8, "application/json");
        try
        {
            HttpResponseMessage curResponse = await client.PostAsync(url_gen, curContent);
            curResponse.EnsureSuccessStatusCode();
            string curResponseBody = await curResponse.Content.ReadAsStringAsync();
            var curAPIResponse = new GenerateAck(curResponseBody);
            if (timeTag)
            {
                return curAPIResponse.Message + "\n" + curAPIResponse.Duration;
            }

            return curAPIResponse.Message;
        }
        catch (Exception ex)
        {
            // Unity's console does not show Console.WriteLine; use Debug.LogError.
            // Deliberately best-effort: keep the original contract of returning
            // null on any failure rather than propagating.
            Debug.LogError("Request failed: " + ex.Message);
            return null;
        }
    }
}

/// <summary>
/// DeepSeek chat backend (OpenAI-compatible cloud endpoint).
/// NOTE(review): Generate()/Chat() are not overridden — selecting this model
/// falls through to the no-op base implementations and returns null.
/// </summary>
public class VLLM_DeepSeek_Cloud : VLLM_ModelBase
{
    public override string modelName => "deepseek-chat";
    public override string serverUrl => "https://api.deepseek.com/chat/completions";
    // SECURITY: hard-coded API key committed to source. Rotate this key and
    // load it from an environment variable or untracked config file instead.
    public override string APIKEY => "sk-0894428f868a4002b98d4eeff6874d0b";

    // Shared client for all requests; created once in StartServe().
    public HttpClient client;

    /// <summary>
    /// Creates the shared HttpClient with the Bearer token, matching the other
    /// cloud backend. Previously there was no override, so the base no-op ran
    /// and <see cref="client"/> stayed null.
    /// </summary>
    public override void StartServe()
    {
        client = new HttpClient();
        client.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", APIKEY);
    }
}
