package tsj.ai.alibaba.data.analysis.service.processing;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.document.Document;
import org.springframework.util.Assert;
import reactor.core.publisher.Flux;
import tsj.ai.alibaba.data.analysis.dto.schema.SchemaDTO;
import tsj.ai.alibaba.data.analysis.service.datasource.DatasourceService;
import tsj.ai.alibaba.data.analysis.service.llm.LlmService;
import tsj.ai.alibaba.data.analysis.service.vectorstore.AgentVectorStoreService;
import tsj.ai.alibaba.data.analysis.prompt.PromptHelper;

import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import static tsj.ai.alibaba.data.analysis.constant.Constant.TABLE;

/**
 * @author taoshujian
 * @version 1.0
 * @since 2025-11-14 11:23:19
 */
@Slf4j
@AllArgsConstructor
public abstract class AbstractQueryProcessingService implements QueryProcessingService {

    /**
     * Shared JSON mapper. {@link ObjectMapper} is thread-safe and expensive to
     * construct, so it is cached instead of being re-created on every call.
     */
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    private final LlmService llmService;
    private final DatasourceService datasourceService;

    /**
     * @return the vector store service used to look up this agent's documents
     */
    protected abstract AgentVectorStoreService getVectorStoreService();

    /**
     * Retrieves evidence snippets matching the query from the agent's vector store.
     *
     * @param query   the user's natural-language question
     * @param agentId identifier of the agent whose evidence collection is searched; must not be null
     * @return the text of every matched evidence document (possibly empty)
     * @throws IllegalArgumentException if {@code agentId} is null
     */
    public List<String> extractEvidences(String query, String agentId) {
        log.info("记录开始提取证据的日志信息,查询内容是: {}", query);
        Assert.notNull(agentId, "AgentId cannot be null");
        List<Document> evidenceDocuments =
                getVectorStoreService().getDocumentsForAgent(agentId, query, "evidence");

        List<String> evidences = evidenceDocuments.stream()
                .map(Document::getText)
                .collect(Collectors.toList());
        log.info("与向量库匹配结果 evidences: {}", evidences);
        return evidences;
    }

    /**
     * Extracts keywords from the query by asking the LLM and parsing its reply
     * as a JSON array of strings.
     *
     * @param query the user's natural-language question
     * @return the keywords returned by the LLM
     * @throws IllegalStateException if the LLM reply is not a valid JSON array of strings
     */
    public List<String> extractKeywords(String query) {
        log.debug("Extracting keywords from query: {} ", query);

        // Prompt template: question-to-keywords.txt
        String prompt = PromptHelper.buildQueryToKeywordsPrompt(query);
        log.debug("Calling LLM for keyword extraction");
        String keywordString = llmService.callUser(prompt);
        try {
            return OBJECT_MAPPER.readValue(keywordString, new TypeReference<List<String>>() {});
        } catch (JsonProcessingException e) {
            // Include the raw LLM output so a malformed reply is diagnosable from logs.
            throw new IllegalStateException(
                    "Failed to parse keyword list from LLM response: " + keywordString, e);
        }
    }

    /**
     * Expands the question into related variants (问题拓展).
     *
     * <p>NOTE(review): not yet implemented — returns {@code null}, which callers
     * must currently guard against; consider {@code Flux.empty()} once implemented.
     *
     * @param query          the user's question
     * @param resultConsumer receives the expanded question list when available
     * @return the streaming chat response, or {@code null} (unimplemented)
     */
    @Override
    public Flux<ChatResponse> expandQuestion(String query, Consumer<List<String>> resultConsumer) {
        return null;
    }

    /**
     * Rewrites the query as a streaming response, collecting the rewritten text.
     *
     * <p>NOTE(review): not yet implemented — returns {@code null}; callers must
     * currently guard against this.
     *
     * @param query                the user's question
     * @param agentId              identifier of the agent handling the query
     * @param queryResultCollector accumulates the rewritten query text
     * @return the streaming chat response, or {@code null} (unimplemented)
     */
    @Override
    public Flux<ChatResponse> rewriteStream(String query, String agentId, StringBuilder queryResultCollector) {
        return null;
    }

    /**
     * 处理查询中的时间表达式，将相对时间转换为具体时间
     * (Resolves relative time expressions in the query into concrete times.)
     *
     * <p>NOTE(review): not yet implemented — returns {@code null}.
     *
     * @param query 原始查询 (the original query)
     * @return 处理后的查询 (the processed query stream), or {@code null} (unimplemented)
     */
    private Flux<ChatResponse> processTimeExpressions(String query) {
        return null;
    }

    /**
     * Coarse schema selection for the query.
     *
     * <p>NOTE(review): not yet implemented — returns {@code null}.
     *
     * @param query        the user's question
     * @param evidenceList evidence snippets supporting the selection
     * @param agentId      identifier of the agent whose schema is searched
     * @param dtoConsumer  receives the selected schema when available
     * @return the streaming chat response, or {@code null} (unimplemented)
     */
    private Flux<ChatResponse> select(String query, List<String> evidenceList, String agentId,
                                      Consumer<SchemaDTO> dtoConsumer) {
        return null;
    }

    /**
     * Fine-grained refinement of a previously selected schema.
     *
     * <p>NOTE(review): not yet implemented — returns {@code null}.
     *
     * @param schemaDTO    the coarse schema to refine
     * @param query        the user's question
     * @param evidenceList evidence snippets supporting the refinement
     * @param dtoConsumer  receives the refined schema when available
     * @return the streaming chat response, or {@code null} (unimplemented)
     */
    private Flux<ChatResponse> fineSelect(SchemaDTO schemaDTO, String query, List<String> evidenceList,
                                          Consumer<SchemaDTO> dtoConsumer) {
        return null;
    }
}
