package com.rex.saas.utils;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.time.Duration;

/**
 * Singleton client for the Ollama chat API ({@code /api/chat}).
 *
 * <p>Supports a built-in system-role persona, single- and multi-turn
 * conversations, and a streaming flag on the underlying request.
 *
 * <p>Thread-safe: the shared {@link HttpClient} and {@link Gson} instances
 * are safe for concurrent use, and the singleton is published via
 * double-checked locking on a volatile field.
 */
public class OllamaChatClient {

    // Lazily-initialized singleton; volatile is required for the
    // double-checked locking in getInstance() to be safe.
    private static volatile OllamaChatClient instance;

    // Endpoint and default-model configuration.
    private static final String DEFAULT_MODEL = "qwen3";
    private static final String CHAT_API_URL = "http://localhost:11434/api/chat";
    // Persona injected as the "system" message of every single-turn chat.
    private static final String SYSTEM_PROMPT = "You are a sarcastic but brilliant AI named GLaDOS. " +
            "Respond with dark humor and scientific flair.";
    // Fail fast instead of hanging forever when the server is unreachable.
    private static final Duration CONNECT_TIMEOUT = Duration.ofSeconds(10);

    // Shared resources (both are thread-safe).
    private final HttpClient httpClient;
    private final Gson gson;

    // Private constructor prevents external instantiation.
    private OllamaChatClient() {
        this.httpClient = HttpClient.newBuilder()
                .connectTimeout(CONNECT_TIMEOUT)
                .build();
        this.gson = new Gson();
    }

    /**
     * Returns the singleton instance (thread-safe, lazily created).
     *
     * @return the shared {@code OllamaChatClient}
     */
    public static OllamaChatClient getInstance() {
        if (instance == null) {
            synchronized (OllamaChatClient.class) {
                if (instance == null) {
                    instance = new OllamaChatClient();
                }
            }
        }
        return instance;
    }

    /**
     * Sends a single-turn chat request with the built-in system persona.
     *
     * @param userMessage the user's input
     * @param model       model name; may be {@code null}, in which case
     *                    {@value #DEFAULT_MODEL} is used
     * @return the model's reply content
     * @throws Exception on network failure, a non-200 HTTP status, or a
     *                   malformed API response
     */
    public String chat(String userMessage, String model) throws Exception {
        JsonArray messages = new JsonArray();

        // System message first: establishes the assistant persona.
        messages.add(createMessage("system", SYSTEM_PROMPT));

        // Then the actual user turn.
        messages.add(createMessage("user", userMessage));

        return sendChatRequest(messages, model != null ? model : DEFAULT_MODEL, false);
    }

    /**
     * Sends a multi-turn chat request with a caller-supplied message history.
     *
     * @param messages full message list (system/user/assistant entries)
     * @param model    model name; may be {@code null}, in which case
     *                 {@value #DEFAULT_MODEL} is used
     * @return the model's reply content
     * @throws Exception on network failure, a non-200 HTTP status, or a
     *                   malformed API response
     */
    public String chatWithHistory(JsonArray messages, String model) throws Exception {
        return sendChatRequest(messages, model != null ? model : DEFAULT_MODEL, false);
    }

    // --- Internal helpers ---

    /**
     * POSTs the chat payload to the Ollama API and extracts the reply text.
     *
     * @param messages message history to send
     * @param model    model name (already defaulted by the caller)
     * @param stream   whether to request a streaming response
     * @return the {@code content} field of the response's {@code message} object
     * @throws Exception on I/O failure, a non-200 status, or missing fields
     */
    private String sendChatRequest(JsonArray messages, String model, boolean stream) throws Exception {
        JsonObject requestBody = new JsonObject();
        requestBody.addProperty("model", model);
        requestBody.add("messages", messages);
        requestBody.addProperty("stream", stream);

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(CHAT_API_URL))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(requestBody.toString(), StandardCharsets.UTF_8))
                .build();

        HttpResponse<String> response = httpClient.send(request, HttpResponse.BodyHandlers.ofString());

        if (response.statusCode() != 200) {
            throw new RuntimeException("Ollama API error: " + response.statusCode() + " - " + response.body());
        }

        // Guard against a malformed body so we raise a descriptive error
        // instead of an opaque NullPointerException.
        JsonObject jsonResponse = gson.fromJson(response.body(), JsonObject.class);
        if (jsonResponse == null || !jsonResponse.has("message")) {
            throw new RuntimeException("Unexpected Ollama API response: " + response.body());
        }
        return jsonResponse.getAsJsonObject("message").get("content").getAsString();
    }

    /**
     * Builds one chat message object: {@code {"role": ..., "content": ...}}.
     */
    private JsonObject createMessage(String role, String content) {
        JsonObject msg = new JsonObject();
        msg.addProperty("role", role);
        msg.addProperty("content", content);
        return msg;
    }

    // --- Usage example ---
    public static void main(String[] args) {
        try {
            OllamaChatClient client = OllamaChatClient.getInstance();
            String reply = client.chat("Why did the chicken cross the road?", "qwen3");
            System.out.println("🤖 GLaDOS: " + reply);

        } catch (Exception e) {
            // Demo-only error handling; production callers should log properly.
            e.printStackTrace();
        }
    }
}