package com.yishuai.ollamatest1.service;

import com.yishuai.ollamatest1.dto.ChatResponse;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategies;
import com.fasterxml.jackson.databind.annotation.JsonNaming;
import okhttp3.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.time.Duration;

@Service
public class OllamaClientService {
    private static final Logger logger = LoggerFactory.getLogger(OllamaClientService.class);
    private final OkHttpClient client;
    // ObjectMapper is thread-safe after configuration; one instance per service is enough.
    private final ObjectMapper mapper = new ObjectMapper();
    private final String baseUrl;
    private final String model;

    /**
     * Creates the service with connection settings resolved from application properties.
     *
     * @param host      Ollama server host ({@code ollama.host})
     * @param port      Ollama server port ({@code ollama.port})
     * @param model     model name sent with every request ({@code ollama.model})
     * @param timeoutMs overall call/read/write timeout in milliseconds
     *                  ({@code ollama.timeout}, default 120000)
     */
    public OllamaClientService(
            @Value("${ollama.host}") String host,
            @Value("${ollama.port}") int port,
            @Value("${ollama.model}") String model,
            @Value("${ollama.timeout:120000}") long timeoutMs
    ) {
        this.baseUrl = String.format("http://%s:%d/api", host, port);
        this.model = model;
        this.client = new OkHttpClient.Builder()
                .callTimeout(Duration.ofMillis(timeoutMs))
                .connectTimeout(Duration.ofSeconds(10))
                .readTimeout(Duration.ofMillis(timeoutMs))
                // Writes share the configured timeout; previously unbounded.
                .writeTimeout(Duration.ofMillis(timeoutMs))
                .build();
    }

    /**
     * Sends a single-turn chat request to Ollama's {@code /api/chat} endpoint.
     * Streaming is explicitly disabled ({@code stream=false}), so the server
     * replies with one complete JSON object rather than newline-delimited chunks.
     *
     * @param userMessage text sent as the single {@code user}-role message
     * @return a {@link ChatResponse} carrying the assistant reply and any
     *         metadata fields present in the raw response
     * @throws IOException on network failure or a non-2xx status from Ollama
     */
    public ChatResponse chat(String userMessage) throws IOException {
        logger.info("准备调用 Ollama API，用户消息: {}", userMessage);

        RequestBody body = RequestBody.create(
                buildRequestJson(userMessage),
                MediaType.get("application/json; charset=utf-8"));
        Request req = new Request.Builder()
                .url(baseUrl + "/chat")
                .post(body)
                .build();

        logger.debug("发送请求到 Ollama API: {}", req.url());
        try (Response resp = client.newCall(req).execute()) {
            // An OkHttp response body may be consumed only once — read it
            // up front and reuse the string for both error and success paths.
            String respBody = resp.body() != null ? resp.body().string() : "";
            if (!resp.isSuccessful()) {
                logger.error("Ollama API 错误: {} {}", resp.code(), respBody);
                throw new IOException("Ollama API error: " + resp.code() + " " + respBody);
            }
            logger.info("Ollama 服务器返回原始数据: {}", respBody);
            return parseResponse(mapper.readTree(respBody));
        }
    }

    /**
     * Builds the JSON request body for {@code /api/chat}: the configured model,
     * {@code stream=false}, and a one-element {@code messages} array.
     */
    private String buildRequestJson(String userMessage) {
        return mapper.createObjectNode()
                .put("model", model)
                .put("stream", false)  // request a single complete response
                .set("messages", mapper.createArrayNode().add(
                        mapper.createObjectNode().put("role", "user").put("content", userMessage)
                )).toString();
    }

    /**
     * Maps the raw Ollama JSON response onto a {@link ChatResponse}, copying
     * whichever metadata fields are present and extracting the assistant reply.
     */
    private ChatResponse parseResponse(JsonNode root) {
        ChatResponse result = new ChatResponse();

        // Metadata fields are all optional; copy only those the server sent.
        if (root.has("model")) result.setModel(root.get("model").asText());
        if (root.has("created_at")) result.setCreatedAt(root.get("created_at").asText());
        if (root.has("done")) result.setDone(root.get("done").asBoolean());
        if (root.has("done_reason")) result.setDoneReason(root.get("done_reason").asText());
        if (root.has("total_duration")) result.setTotalDuration(root.get("total_duration").asLong());
        if (root.has("load_duration")) result.setLoadDuration(root.get("load_duration").asLong());
        if (root.has("prompt_eval_count")) result.setPromptEvalCount(root.get("prompt_eval_count").asInt());
        if (root.has("prompt_eval_duration")) result.setPromptEvalDuration(root.get("prompt_eval_duration").asLong());
        if (root.has("eval_count")) result.setEvalCount(root.get("eval_count").asInt());
        if (root.has("eval_duration")) result.setEvalDuration(root.get("eval_duration").asLong());

        String assistantReply = extractAssistantReply(root);
        logger.info("Ollama 助手回复: {}", assistantReply);
        result.setMessageContent(assistantReply);
        return result;
    }

    /**
     * Extracts the assistant's text from any of the response shapes Ollama
     * (or an OpenAI-compatible proxy) may return, in order of preference:
     * {@code message.content}, a {@code messages} array with an assistant
     * entry, or an OpenAI-style {@code choices} array. Returns an empty
     * string if no reply is found.
     */
    private String extractAssistantReply(JsonNode root) {
        if (root.has("message") && root.get("message").has("content")) {
            return root.get("message").get("content").asText();
        }
        if (root.has("messages")) {
            for (JsonNode msg : root.get("messages")) {
                if (msg.has("role") && "assistant".equals(msg.get("role").asText()) && msg.has("content")) {
                    return msg.get("content").asText();
                }
            }
            return "";
        }
        if (root.has("choices")) {
            JsonNode choices = root.get("choices");
            if (choices.isArray() && choices.size() > 0) {
                JsonNode firstChoice = choices.get(0);
                if (firstChoice.has("message") && firstChoice.get("message").has("content")) {
                    return firstChoice.get("message").get("content").asText();
                }
                if (firstChoice.has("content")) {
                    return firstChoice.get("content").asText();
                }
            }
        }
        return "";
    }
}