﻿using System.Text;
using Newtonsoft.Json;

namespace TextAI
{
    /// <summary>
    /// Thin HTTP client for a locally running Ollama server (default port 11434).
    /// Wraps the <c>/api/tags</c> (model listing) and <c>/api/generate</c> endpoints.
    /// Owns an <see cref="HttpClient"/>; call <see cref="Dispose"/> when finished.
    /// </summary>
    public class OllamaClient : IDisposable
    {
        private readonly HttpClient _httpClient;
        private const string BaseUrl = "http://localhost:11434";

        public OllamaClient()
        {
            // Local inference can take minutes for large models/prompts, so use
            // a generous timeout instead of HttpClient's 100-second default.
            _httpClient = new HttpClient { Timeout = TimeSpan.FromMinutes(5) };
        }

        /// <summary>
        /// Probes whether the Ollama HTTP API is reachable.
        /// </summary>
        /// <returns><c>true</c> if <c>/api/tags</c> answered with a success status;
        /// <c>false</c> on any failure (connection refused, timeout, ...).</returns>
        public async Task<bool> IsOllamaRunningAsync()
        {
            try
            {
                using var response = await _httpClient.GetAsync($"{BaseUrl}/api/tags");
                return response.IsSuccessStatusCode;
            }
            catch
            {
                // Deliberate best-effort probe: any exception means "not running".
                return false;
            }
        }

        /// <summary>
        /// Fetches the names of all models installed on the Ollama server.
        /// </summary>
        /// <returns>Model names, or an empty list when the request fails
        /// (a message is written to the console in that case).</returns>
        public async Task<List<string>> GetAvailableModelsAsync()
        {
            try
            {
                using var response = await _httpClient.GetAsync($"{BaseUrl}/api/tags");
                if (response.IsSuccessStatusCode)
                {
                    var content = await response.Content.ReadAsStringAsync();
                    var result = JsonConvert.DeserializeObject<OllamaTagsResponse>(content);
                    return result?.Models?.Select(m => m.Name).ToList() ?? new List<string>();
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine($"获取模型列表失败: {ex.Message}");
            }
            return new List<string>();
        }

        /// <summary>
        /// Sends one non-streaming prompt to the model and returns its answer.
        /// Never throws: failures are reported as human-readable strings.
        /// </summary>
        /// <param name="model">Name of an installed model (e.g. "llama3").</param>
        /// <param name="prompt">The user's question/prompt text.</param>
        /// <returns>The trimmed model response, or an error description.</returns>
        public async Task<string> GenerateResponseAsync(string model, string prompt)
        {
            try
            {
                var request = new OllamaGenerateRequest
                {
                    Model = model,
                    Prompt = prompt,
                    Stream = false // request one complete answer, not a token stream
                };

                var json = JsonConvert.SerializeObject(request);
                using var content = new StringContent(json, Encoding.UTF8, "application/json");

                using var response = await _httpClient.PostAsync($"{BaseUrl}/api/generate", content);

                if (response.IsSuccessStatusCode)
                {
                    var responseContent = await response.Content.ReadAsStringAsync();
                    var result = JsonConvert.DeserializeObject<OllamaGenerateResponse>(responseContent);
                    return result?.Response?.Trim() ?? "没有收到响应";
                }

                return $"请求失败: {response.StatusCode}";
            }
            catch (Exception ex)
            {
                return $"发生错误: {ex.Message}";
            }
        }

        /// <summary>Releases the underlying <see cref="HttpClient"/>.</summary>
        public void Dispose()
        {
            _httpClient.Dispose();
            GC.SuppressFinalize(this);
        }
    }

    /// <summary>Deserialization target for Ollama's <c>GET /api/tags</c> response.</summary>
    public class OllamaTagsResponse
    {
        /// <summary>Models installed on the server; defaults to empty, never null.</summary>
        [JsonProperty("models")]
        public List<OllamaModel> Models { get; set; } = new List<OllamaModel>();
    }

    /// <summary>A single entry in the server's installed-model list.</summary>
    public class OllamaModel
    {
        /// <summary>Full model name as reported by Ollama (e.g. "llama3:latest").</summary>
        [JsonProperty("name")]
        public string Name { get; set; } = "";
    }

    /// <summary>Request body for Ollama's <c>POST /api/generate</c> endpoint.</summary>
    public class OllamaGenerateRequest
    {
        /// <summary>Name of the model to run the prompt against.</summary>
        [JsonProperty("model")]
        public string Model { get; set; } = "";

        /// <summary>The prompt text to send to the model.</summary>
        [JsonProperty("prompt")]
        public string Prompt { get; set; } = "";

        /// <summary>When false, the server returns one complete response
        /// instead of a stream of partial tokens.</summary>
        [JsonProperty("stream")]
        public bool Stream { get; set; }
    }

    /// <summary>Deserialization target for <c>POST /api/generate</c> (non-streaming).</summary>
    public class OllamaGenerateResponse
    {
        /// <summary>Model that produced this response.</summary>
        [JsonProperty("model")]
        public string Model { get; set; } = "";

        /// <summary>The generated answer text.</summary>
        [JsonProperty("response")]
        public string Response { get; set; } = "";

        /// <summary>True when generation has finished.</summary>
        [JsonProperty("done")]
        public bool Done { get; set; }
    }

    /// <summary>
    /// Console front-end: verifies the Ollama service is running, lets the user
    /// choose an installed model, then runs an interactive question/answer loop.
    /// Commands inside the loop: quit/exit to leave, clear to clear the screen.
    /// </summary>
    class Program
    {
        static async Task Main(string[] args)
        {
            try
            {
                // UTF-8 so Chinese text and emoji render correctly. The encoding
                // setters throw IOException when stdin/stdout are redirected.
                Console.OutputEncoding = Encoding.UTF8;
                Console.InputEncoding = Encoding.UTF8;
            }
            catch (IOException)
            {
                // Redirected streams — keep the default encodings.
            }

            Console.WriteLine("=== 本地 AI 对话客户端 ===");
            Console.WriteLine("正在检查 Ollama 服务状态...");

            var client = new OllamaClient();

            if (!await client.IsOllamaRunningAsync())
            {
                Console.WriteLine("❌ Ollama 服务未运行！");
                Console.WriteLine("请确保 Ollama 已安装并正在运行：");
                Console.WriteLine("1. 下载并安装 Ollama (https://ollama.ai/)");
                Console.WriteLine("2. 启动 Ollama 服务");
                Console.WriteLine("3. 拉取一个模型，例如: ollama pull llama3");
                Console.WriteLine("按任意键退出...");
                Console.ReadKey();
                return;
            }

            Console.WriteLine("✅ Ollama 服务运行正常");
            Console.WriteLine("正在获取可用模型...");

            var models = await client.GetAvailableModelsAsync();

            if (models.Count == 0)
            {
                Console.WriteLine("❌ 未找到可用模型");
                Console.WriteLine("请先拉取模型，例如: ollama pull llama3");
                Console.WriteLine("按任意键退出...");
                Console.ReadKey();
                return;
            }

            Console.WriteLine("\n可用模型:");
            for (int i = 0; i < models.Count; i++)
            {
                Console.WriteLine($"{i + 1}. {models[i]}");
            }

            string selectedModel = SelectModel(models);

            Console.WriteLine($"\n✅ 已选择模型: {selectedModel}");
            Console.WriteLine("\n现在可以开始提问了！");
            Console.WriteLine("输入 'quit' 或 'exit' 退出程序");
            Console.WriteLine("输入 'clear' 清空屏幕");
            Console.WriteLine(new string('-', 50));

            while (true)
            {
                Console.Write("\n🤔 你的问题: ");
                var question = Console.ReadLine();

                if (string.IsNullOrWhiteSpace(question))
                    continue;

                // Ordinal comparison avoids repeated ToLower() allocations and
                // culture-dependent casing surprises (e.g. Turkish dotless i).
                if (question.Equals("quit", StringComparison.OrdinalIgnoreCase) ||
                    question.Equals("exit", StringComparison.OrdinalIgnoreCase))
                    break;

                if (question.Equals("clear", StringComparison.OrdinalIgnoreCase))
                {
                    // Console.Clear throws IOException when output is redirected.
                    if (!Console.IsOutputRedirected)
                        Console.Clear();
                    continue;
                }

                Console.Write("🤖 AI 正在思考...");

                var response = await client.GenerateResponseAsync(selectedModel, question);

                EraseCurrentLine();

                Console.WriteLine($"\n💡 AI 回答: {response}");
                Console.WriteLine(new string('-', 50));
            }

            Console.WriteLine("\n感谢使用本地 AI 客户端！再见！");
        }

        // Prompts for a model index when several are installed; falls back to
        // the first model on invalid input. A single model is auto-selected.
        private static string SelectModel(List<string> models)
        {
            if (models.Count == 1)
            {
                Console.WriteLine($"\n自动选择模型: {models[0]}");
                return models[0];
            }

            Console.Write("\n请选择模型 (输入序号): ");
            var input = Console.ReadLine();

            if (!int.TryParse(input, out int choice) || choice < 1 || choice > models.Count)
            {
                Console.WriteLine("无效选择，使用第一个模型");
                return models[0];
            }

            return models[choice - 1];
        }

        // Overwrites the "thinking..." indicator in place. The previous code
        // called SetCursorPosition unconditionally, which throws IOException
        // when stdout is not a real console (piped/redirected output), and its
        // WriteLine + (CursorTop - 1) dance could go out of range at row 0.
        private static void EraseCurrentLine()
        {
            if (Console.IsOutputRedirected)
            {
                Console.WriteLine();
                return;
            }

            Console.SetCursorPosition(0, Console.CursorTop);
            Console.Write(new string(' ', 60));
            Console.SetCursorPosition(0, Console.CursorTop);
        }
    }
}