package com.lm.langchain4j.service;

import com.lm.langchain4j.adapter.ChatModelAdapter;
import com.lm.langchain4j.adapter.EmbeddingModelAdapter;
import com.lm.langchain4j.adapter.InMemoryVectorStore;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.stream.Collectors;

@Service
public class QAService {

    private final InMemoryVectorStore vectorStore;
    private final EmbeddingModelAdapter embeddingAdapter;
    private final ChatModelAdapter chatAdapter;

    /**
     * Creates the QA service, wrapping the raw models in their adapters.
     *
     * @param vectorStore    in-memory store holding pre-embedded chunks
     * @param embeddingModel model used to embed the incoming question
     * @param chatModel      model used to generate the final answer
     * @throws NullPointerException if any dependency is {@code null}
     */
    public QAService(InMemoryVectorStore vectorStore,
                     EmbeddingModel embeddingModel, OpenAiChatModel chatModel) {
        this.vectorStore = Objects.requireNonNull(vectorStore, "vectorStore");
        this.embeddingAdapter = new EmbeddingModelAdapter(
                Objects.requireNonNull(embeddingModel, "embeddingModel"));
        this.chatAdapter = new ChatModelAdapter(
                Objects.requireNonNull(chatModel, "chatModel"));
    }

    /**
     * Answers a question via retrieval-augmented generation: embeds the
     * question, retrieves the {@code topK} most similar chunks from the
     * vector store, and asks the chat model to answer based on them.
     *
     * @param question the user question; must be non-null and non-blank
     * @param topK     number of chunks to retrieve; must be positive
     * @return the answer generated by the chat model
     * @throws IllegalArgumentException if {@code question} is null/blank
     *                                  or {@code topK} is not positive
     */
    public String ask(String question, int topK) {
        if (question == null || question.isBlank()) {
            throw new IllegalArgumentException("question must not be blank");
        }
        if (topK <= 0) {
            throw new IllegalArgumentException("topK must be positive, got: " + topK);
        }

        // Embed the question.
        double[] q = embeddingAdapter.embedTexts(List.of(question)).get(0);

        // Retrieve the topK most similar chunks.
        List<InMemoryVectorStore.SearchResult> results = vectorStore.search(q, topK);

        // Locale.ROOT keeps the score rendering stable ("0.1234", never
        // "0,1234") regardless of the JVM's default locale.
        String context = results.stream()
                .map(r -> String.format(Locale.ROOT, "[score=%.4f]\n%s\n", r.score(), r.text()))
                .collect(Collectors.joining("\n"));

        String prompt = "下面是相关的知识片段：\n" + context +
                "\n请基于以上片段回答用户问题（如无明确答案，请明确说明）：\n问题：" + question;

        // Call the LLM to generate the answer.
        return chatAdapter.generate(prompt);
    }
}
