package cn.showcon.firstapp.service;

import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.embedding.onnx.allminilml6v2.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.rag.content.Content;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.Query;
import dev.langchain4j.store.embedding.*;
import dev.langchain4j.store.embedding.filter.Filter;
import dev.langchain4j.store.embedding.filter.comparison.IsEqualTo;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.model.openai.OpenAiTokenizer;

import java.util.List;
import java.util.Map;

/**
 * Minimal RAG (retrieval-augmented generation) helper: splits and embeds
 * documents into an embedding store, and retrieves relevant text segments
 * via metadata-filtered similarity search.
 *
 * @author Xue Lanbin
 */
public class RagService {

    /**
     * Maximum length of each document chunk, measured in tokens.
     */
    public static final int RAG_MAX_SEGMENT_SIZE_IN_TOKENS = 500;

    /**
     * Number of tokens shared between adjacent chunks. Overlap preserves
     * context that would otherwise be cut off at a hard chunk boundary.
     */
    private final int overlap = 100;

    /** Model used to turn text into embedding vectors. */
    private final EmbeddingModel embeddingModel;

    /** Store that holds embedded segments and answers similarity searches. */
    private final EmbeddingStore<TextSegment> embeddingStore;

    /**
     * Creates a service backed by the given model and store.
     *
     * @param embeddingModel model used for both ingestion and query embedding
     * @param embeddingStore store the embedded segments are written to / searched in
     */
    public RagService(EmbeddingModel embeddingModel, EmbeddingStore<TextSegment> embeddingStore) {
        this.embeddingModel = embeddingModel;
        this.embeddingStore = embeddingStore;
    }

    /**
     * Creates a local all-MiniLM-L6-v2 ONNX embedding model (no network calls).
     *
     * @return a new in-process embedding model
     */
    public static AllMiniLmL6V2EmbeddingModel createEmbeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }

    /**
     * Creates an empty in-memory embedding store (contents are not persisted).
     *
     * @return a new in-memory store for text segments
     */
    public static InMemoryEmbeddingStore<TextSegment> createEmbeddingStore() {
        return new InMemoryEmbeddingStore<>();
    }

    /**
     * Splits {@code content} into chunks, embeds them and stores them, tagging
     * every segment with the given knowledge identifiers so they can be
     * filtered at retrieval time via {@link #createFilter(Map)}.
     *
     * @param knowledgeId     id of the knowledge base the content belongs to
     * @param knowledgeItemId id of the item inside that knowledge base
     * @param content         raw text to ingest
     * @return always {@code true} (kept for caller compatibility)
     */
    public boolean embedding(String knowledgeId, String knowledgeItemId, String content) {
        Metadata metadata = new Metadata();
        metadata.put("knowledge_id", knowledgeId);
        metadata.put("knowledge_item_id", knowledgeItemId);

        Document document = new Document(content, metadata);
        // Use the configured overlap field instead of a duplicated magic
        // number, so the field actually governs chunking.
        ingest(document, overlap);
        return true;
    }

    /**
     * Splits {@code document} into chunks of at most
     * {@value #RAG_MAX_SEGMENT_SIZE_IN_TOKENS} tokens with the given overlap,
     * embeds each chunk and writes it to the embedding store. The document's
     * metadata is propagated to every resulting segment.
     *
     * @param document document to ingest
     * @param overlap  number of tokens shared between adjacent chunks
     */
    public void ingest(Document document, int overlap) {
        DocumentSplitter documentSplitter =
                DocumentSplitters.recursive(RAG_MAX_SEGMENT_SIZE_IN_TOKENS, overlap);
        EmbeddingStoreIngestor embeddingStoreIngestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        embeddingStoreIngestor.ingest(document);
    }

    /**
     * Builds a content retriever over this service's store and model.
     *
     * @param filter     metadata filter applied to candidate segments; may be
     *                   {@code null} to disable filtering
     * @param maxResults maximum number of segments to return per query
     * @param minScore   minimum similarity score a match must reach
     * @return a configured retriever
     */
    public EmbeddingStoreContentRetriever createContentRetriever(Filter filter, int maxResults, double minScore) {
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(maxResults)
                .minScore(minScore)
                .filter(filter)
                .build();
    }

    /**
     * Builds a conjunctive (AND) equality filter from the given metadata
     * conditions: every entry must match exactly.
     *
     * @param metadataCond metadata key/value pairs that must all be equal
     * @return the combined filter, or {@code null} if {@code metadataCond}
     *         is empty (callers treat {@code null} as "no filtering")
     */
    public static Filter createFilter(Map<String, String> metadataCond) {
        Filter filter = null;
        for (Map.Entry<String, String> entry : metadataCond.entrySet()) {
            Filter condition = new IsEqualTo(entry.getKey(), entry.getValue());
            filter = (filter == null) ? condition : filter.and(condition);
        }
        return filter;
    }

    /**
     * Smoke test: ingests a few snippets, then retrieves matches for "Alex"
     * twice — once via the high-level content retriever, once via a raw
     * embedding-store search — and prints both result sets.
     */
    public static void main(String[] args) {
        AllMiniLmL6V2EmbeddingModel embeddingModel = RagService.createEmbeddingModel();
        InMemoryEmbeddingStore<TextSegment> embeddingStore = RagService.createEmbeddingStore();
        RagService ragService = new RagService(embeddingModel, embeddingStore);

        ragService.embedding("100", "1", "I love you");
        ragService.embedding("100", "1", "Alex To");
        ragService.embedding("100", "1", "Ben");

        System.out.println(embeddingStore.serializeToJson());

        // Restrict retrieval to segments tagged with knowledge_id == "100".
        Map<String, String> metadataCond = Map.of("knowledge_id", "100");
        Filter filter = RagService.createFilter(metadataCond);

        Query query = Query.from("Alex");

        // Path 1: high-level retriever API.
        System.out.println("-----");
        EmbeddingStoreContentRetriever contentRetriever = ragService.createContentRetriever(filter, 2, 0.6);
        List<Content> result = contentRetriever.retrieve(query);
        for (Content content : result) {
            System.out.println(content.textSegment().text());
        }

        // Path 2: raw store search with an explicitly embedded query;
        // should surface the same matches as the retriever above.
        System.out.println("-----");
        Embedding embeddingQuery = embeddingModel.embed(query.text()).content();
        EmbeddingSearchRequest embeddingSearchRequest = EmbeddingSearchRequest.builder()
                .queryEmbedding(embeddingQuery)
                .maxResults(2)
                .minScore(0.6)
                .filter(filter)
                .build();
        EmbeddingSearchResult<TextSegment> embeddingSearchResult = embeddingStore.search(embeddingSearchRequest);
        for (EmbeddingMatch<TextSegment> item : embeddingSearchResult.matches()) {
            System.out.println(String.format("%s, %s, %s", item.embeddingId(), item.score(), item.embedded().text()));
        }

    }
}
