package com.hzy.service.rag;

import com.hzy.common.R;
import com.hzy.service.document.DocumentVerticle;
import com.hzy.service.ollama.OllamaVerticle;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Promise;
import io.vertx.core.json.JsonArray;
import io.vertx.core.json.JsonObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.stream.Collectors;

/**
 * Verticle implementing a Retrieval-Augmented Generation (RAG) pipeline over the Event Bus.
 *
 * <p>Flow for each request received on {@link #RAG_REQUEST_ADDRESS}:
 * <ol>
 *   <li><b>Retrieve</b> — ask {@code DocumentVerticle} for documents relevant to the query
 *       (backed by Elasticsearch);</li>
 *   <li><b>Augment</b> — combine the retrieved context with the user's question into a
 *       single prompt (see {@link #buildAugmentedPrompt});</li>
 *   <li><b>Generate</b> — forward the augmented prompt to {@code OllamaVerticle} and reply
 *       with the model's response body.</li>
 * </ol>
 *
 * <p>Request message body (JsonObject): {@code query} (required), {@code model} (optional,
 * defaults to {@link #DEFAULT_MODEL}).
 */
public class RAGVerticle extends AbstractVerticle {

    private static final Logger LOGGER = LoggerFactory.getLogger(RAGVerticle.class);

    /** Event Bus address on which this verticle accepts RAG requests. */
    public static final String RAG_REQUEST_ADDRESS = "rag.request";

    /** Model used when the incoming request does not specify one. */
    private static final String DEFAULT_MODEL = "qwen2.5:0.5b";

    /** Upper bound on the number of context characters embedded in the prompt. */
    private static final int MAX_CONTEXT_LENGTH = 5000;

    @Override
    public void start(Promise<Void> startPromise) {
        // Typed consumer removes the unchecked (JsonObject) cast of message.body().
        vertx.eventBus().<JsonObject>consumer(RAG_REQUEST_ADDRESS, message -> {
            JsonObject request = message.body();
            String userQuery = request.getString("query");
            String model = request.getString("model", DEFAULT_MODEL);

            if (userQuery == null || userQuery.isEmpty()) {
                message.fail(400, "Query cannot be empty for RAG request.");
                return;
            }

            LOGGER.info("Received RAG request for query: '{}' using model: '{}'", userQuery, model);

            // 1. Retrieval: ask DocumentVerticle to look up relevant documents in Elasticsearch.
            vertx.eventBus().<JsonArray>request(DocumentVerticle.SEARCH_DOCUMENTS_ADDRESS, userQuery)
                    .onSuccess(docReply -> {
                        List<String> retrievedContext = docReply.body().stream()
                                .map(Object::toString)
                                .collect(Collectors.toList());
                        LOGGER.debug("Retrieved context from Elasticsearch: {}", retrievedContext);

                        // 2. Augmentation: merge the retrieved context with the user's question.
                        String augmentedPrompt = buildAugmentedPrompt(userQuery, retrievedContext);
                        LOGGER.debug("Augmented prompt sent to Ollama: {}", augmentedPrompt);

                        // 3. Generation: forward the augmented prompt to OllamaVerticle.
                        JsonObject ollamaRequest = new JsonObject()
                                .put("model", model)
                                .put("stream", false)
                                .put("prompt", augmentedPrompt);

                        vertx.eventBus().<JsonObject>request(OllamaVerticle.OLLAMA_REQUEST_ADDRESS, ollamaRequest)
                                .onSuccess(ollamaResponse -> {
                                    LOGGER.info("RAG generation successful for query: '{}'", userQuery);
                                    // Reply with the body, not the Message wrapper — the Event Bus
                                    // has no codec for Message, so replying with it fails at runtime.
                                    message.reply(ollamaResponse.body());
                                })
                                .onFailure(throwable -> {
                                    // Throwable passed as last argument so SLF4J records the stack trace.
                                    LOGGER.error("RAG generation (Ollama step) failed for query: '{}'", userQuery, throwable);
                                    message.fail(500, new JsonObject()
                                            .put("error", "RAG generation failed (Ollama step)")
                                            .put("details", throwable.getMessage())
                                            .encodePrettily());
                                });
                    })
                    .onFailure(throwable -> {
                        LOGGER.error("RAG retrieval (DocumentVerticle/Elasticsearch step) failed for query: '{}'", userQuery, throwable);
                        message.fail(500, R.error("RAG retrieval failed (DocumentVerticle/Elasticsearch step)").encodePrettily());
                    });
        });

        LOGGER.info("RAGVerticle deployed successfully.");
        startPromise.complete();
    }

    /**
     * Builds the augmented prompt in the shape [system instructions] + [numbered context] + [question].
     *
     * <p>Context entries are numbered {@code [1]}, {@code [2]}, … so the model can cite sources.
     * The context section is truncated once {@link #MAX_CONTEXT_LENGTH} characters would be exceeded.
     *
     * @param userQuery the user's original question
     * @param context   retrieved context snippets (may be empty, never null)
     * @return the complete prompt string to send to the model
     */
    private String buildAugmentedPrompt(String userQuery, List<String> context) {
        StringBuilder promptBuilder = new StringBuilder();
        // System instructions: ground answers in context, require citations, allow "don't know".
        promptBuilder.append("You are an AI assistant that answers questions based solely on the provided context.\n");
        promptBuilder.append("Follow these rules:\n");
        promptBuilder.append("1. Only use information from the provided context to answer the question.\n");
        promptBuilder.append("2. If the answer cannot be found in the context, respond with 'I don't have enough information to answer this question.'\n");
        promptBuilder.append("3. When referencing information from the context, cite the source number(s) in square brackets, e.g. [1].\n");
        promptBuilder.append("4. Keep your answer concise and relevant to the question.\n\n");

        // Context section: numbered entries with an overall length cap.
        if (!context.isEmpty()) {
            promptBuilder.append("Context:\n");
            int currentLength = 0;
            int sourceNumber = 1;

            for (String ctx : context) {
                String numberedContext = String.format("[%d] %s\n", sourceNumber, ctx);
                // Stop before the cap is exceeded and note the truncation for the model.
                if (currentLength + numberedContext.length() > MAX_CONTEXT_LENGTH) {
                    promptBuilder.append("[...] (Additional context truncated due to length limitations)\n");
                    break;
                }
                promptBuilder.append(numberedContext);
                currentLength += numberedContext.length();
                sourceNumber++;
            }
            promptBuilder.append("\n");
        } else {
            promptBuilder.append("Context: No relevant information available.\n\n");
        }

        // Question and answer cue; chained appends avoid concatenation inside the builder.
        promptBuilder.append("Question: ").append(userQuery).append("\n");
        promptBuilder.append("Answer: ");

        return promptBuilder.toString();
    }
}