import com.alibaba.fastjson.JSON;
import org.springframework.ai.chat.client.ChatClientRequest;
import org.springframework.ai.chat.client.ChatClientResponse;
import org.springframework.ai.chat.client.advisor.api.AdvisorChain;
import org.springframework.ai.chat.client.advisor.api.BaseAdvisor;
import org.springframework.ai.chat.client.advisor.api.CallAdvisorChain;
import org.springframework.ai.chat.messages.AssistantMessage;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.ai.vectorstore.filter.Filter;
import org.springframework.ai.vectorstore.filter.FilterExpressionTextParser;
import org.springframework.util.StringUtils;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * @description: RAG 检索增强(使用多路召回+重排器)
 * @author: wxw
 * @date: 2025/9/12
 */
/**
 * RAG retrieval-augmentation advisor (multi-channel recall + reranking) backed by Milvus.
 *
 * <p>Before the model call it retrieves relevant documents for the user's question,
 * renders them into the prompt's {@code question_answer_context} placeholder, and
 * stores the retrieved documents in the advisor context. After the model call it
 * copies the retrieved documents into the {@link ChatResponse} metadata.
 *
 * @author wxw
 * @since 2025/9/12
 */
public class RagAnswerMilvusAdvisor implements BaseAdvisor {

    /** Context/metadata key under which the retrieved documents are stored. */
    private static final String RETRIEVED_DOCUMENTS_KEY = "qa_retrieved_documents";

    /** Context key for an optional caller-supplied filter expression (text form). */
    private static final String FILTER_EXPRESSION_KEY = "qa_filter_expression";

    /** Placeholder in {@code basePrompt} that receives the concatenated document text. */
    private static final String CONTEXT_PLACEHOLDER = "question_answer_context";

    /**
     * Vector store used for multi-channel recall retrieval.
     */
    private final MilvusVectorStoreV2 milvusVectorStoreV2;
    /**
     * Base search request (topK, filter, ...); copied and completed with the query in {@link #before}.
     */
    private final SearchRequest searchRequest;
    /**
     * Prompt that instructs the model to answer strictly from the retrieved context.
     */
    private final String basePrompt;

    /**
     * Creates the advisor.
     *
     * @param milvusVectorStoreV2 vector store used for retrieval; must not be null
     * @param searchRequest       base search request template; must not be null
     */
    public RagAnswerMilvusAdvisor(MilvusVectorStoreV2 milvusVectorStoreV2, SearchRequest searchRequest) {
        this.milvusVectorStoreV2 = milvusVectorStoreV2;
        this.searchRequest = searchRequest;
        this.basePrompt = "\nContext information is below, surrounded by ---------------------\n\n---------------------\n{question_answer_context}\n---------------------\n\nGiven the context and provided history information and not prior knowledge,\nreply to the user comment. If the answer is not in the context, inform\nthe user that you can't answer the question.\n";
    }

    /**
     * Runs before the model call:
     * <ol>
     *   <li>uses the raw user question as the retrieval query (no template rendering,
     *       so braces in user input cannot break parsing),</li>
     *   <li>retrieves relevant documents via multi-channel recall,</li>
     *   <li>renders the retrieved document text into the {@code question_answer_context}
     *       placeholder of the base prompt and appends it to the user question,</li>
     *   <li>stores the retrieved documents in the context for {@link #after}.</li>
     * </ol>
     *
     * @param chatClientRequest the incoming request
     * @param advisorChain      the advisor chain (unused here)
     * @return a new request whose prompt carries the context-augmented user message
     */
    @Override
    public ChatClientRequest before(ChatClientRequest chatClientRequest, AdvisorChain advisorChain) {

        Map<String, Object> context = new HashMap<>(chatClientRequest.context());

        // Raw user question. Used directly as the retrieval query: rendering it through
        // PromptTemplate (as a template) would throw on user input containing '{' or '}'.
        String userText = chatClientRequest.prompt().getUserMessage().getText();

        // Build the effective SearchRequest from the configured template.
        SearchRequest effectiveRequest = SearchRequest.from(this.searchRequest)
                .query(userText)
                .filterExpression(this.doGetFilterExpression(context))
                .build();

        // Multi-channel recall retrieval.
        List<Document> documents = this.milvusVectorStoreV2.multiChannelRecallSearch(effectiveRequest, userText);

        // Expose the retrieved documents to after().
        context.put(RETRIEVED_DOCUMENTS_KEY, documents);

        // Concatenate the document texts and render them into the base prompt's
        // placeholder. Only basePrompt is treated as a template; the user text is
        // prepended verbatim so it can never be interpreted as template syntax.
        String documentContext = documents.stream()
                .map(Document::getText)
                .collect(Collectors.joining(System.lineSeparator()));
        String renderedContextPrompt = new PromptTemplate(this.basePrompt)
                .render(Map.of(CONTEXT_PLACEHOLDER, documentContext));
        String augmentedUserText = userText + System.lineSeparator() + renderedContextPrompt;

        return ChatClientRequest.builder()
                .prompt(Prompt.builder().messages(new UserMessage(augmentedUserText)).build())
                .context(context)
                .build();
    }

    /**
     * Runs after the model call: copies the retrieved documents from the advisor
     * context into the {@link ChatResponse} metadata under
     * {@code qa_retrieved_documents}.
     *
     * @param chatClientResponse the model response
     * @param advisorChain       the advisor chain (unused here)
     * @return a response whose ChatResponse metadata carries the retrieved documents
     */
    @Override
    public ChatClientResponse after(ChatClientResponse chatClientResponse, AdvisorChain advisorChain) {
        ChatResponse chatResponse = ChatResponse.builder()
                .from(chatClientResponse.chatResponse())
                .metadata(RETRIEVED_DOCUMENTS_KEY, chatClientResponse.context().get(RETRIEVED_DOCUMENTS_KEY))
                .build();

        return ChatClientResponse.builder()
                .chatResponse(chatResponse)
                .context(chatClientResponse.context())
                .build();
    }

    /**
     * Synchronous call chain: retrieval augmentation in {@link #before}, then the
     * actual model call, then metadata enrichment in {@link #after}.
     */
    @Override
    public ChatClientResponse adviseCall(ChatClientRequest chatClientRequest, CallAdvisorChain callAdvisorChain) {
        ChatClientResponse chatClientResponse = callAdvisorChain.nextCall(this.before(chatClientRequest, callAdvisorChain));
        return this.after(chatClientResponse, callAdvisorChain);
    }

    @Override
    public int getOrder() {
        return 0;
    }

    /**
     * Resolves the filter expression for this request.
     *
     * <p>If the context holds a non-blank value under {@code qa_filter_expression},
     * it is parsed with {@link FilterExpressionTextParser}; otherwise the filter
     * expression of the configured base {@code searchRequest} is returned.
     * A present-but-null context value falls through to the default (no NPE).
     *
     * @param context the advisor context
     * @return the parsed per-request filter expression, or the configured default (possibly null)
     */
    protected Filter.Expression doGetFilterExpression(Map<String, Object> context) {
        Object expression = context.get(FILTER_EXPRESSION_KEY);
        return (expression != null && StringUtils.hasText(expression.toString()))
                ? new FilterExpressionTextParser().parse(expression.toString())
                : this.searchRequest.getFilterExpression();
    }
}

