package com.lucifer.hawkeye.ai.service.impl;

import com.lucifer.hawkeye.ai.common.response.RestResult;
import com.lucifer.hawkeye.ai.domain.conversation.structured.TopN;
import com.lucifer.hawkeye.ai.rag.HawkeyeRagEngine;
import com.lucifer.hawkeye.ai.rag.RagEngine;
import com.lucifer.hawkeye.ai.service.ChatApiService;
import com.lucifer.hawkeye.ai.service.RagService;
import com.lucifer.hawkeye.ai.vector.HawkeyeVectorStoreEngine;
import com.lucifer.hawkeye.ai.vector.VectorStoreEngine;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import jakarta.annotation.Resource;
import lombok.SneakyThrows;
import org.apache.commons.lang3.StringUtils;
import org.springframework.ai.document.Document;
import org.springframework.ai.document.id.RandomIdGenerator;
import org.springframework.ai.reader.tika.TikaDocumentReader;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.util.Date;
import java.util.List;
import java.util.Map;

/**
 * @author lucifer
 * @date 2025/4/1 15:46
 */
@Service
public class RagServiceImpl implements RagService {

    /** Prompt sent to the chat model to extract the top-3 hot words from a text. */
    private static final String TOP_N_PROMPT = "获取这个文本出现前三的热词";

    @Resource(name = "pgJdbcTemplate")
    private JdbcTemplate pgJdbcTemplate;

    @Resource
    private TokenTextSplitter tokenTextSplitter;

    @Resource
    private ChatApiService chatApiService;

    @Resource(name = "docVectorStore")
    private VectorStore docVectorStore;

    /**
     * Parses the uploaded file, writes its token-split chunks into the vector store,
     * and records one row in {@code base_doc} whose display name carries the
     * document's top-3 hot words.
     *
     * @param file uploaded document (any format Tika can parse)
     * @return the vector-store id of the first (pre-split) extracted document
     * @throws IllegalStateException if Tika extracts no content from the file
     */
    @Override
    @SneakyThrows
    public String write(MultipartFile file) {
        // Parse the file exactly once; the previous version ran Tika extraction
        // twice (once per private helper) over the same upload.
        List<Document> documents = new TikaDocumentReader(file.getResource()).read();
        if (documents.isEmpty()) {
            throw new IllegalStateException("No content extracted from file: " + file.getOriginalFilename());
        }
        String vectorDocId = writeDocumentByTika(file, documents);
        Map<String, String> topN = topN2(documents);
        String docName = file.getOriginalFilename()
                + "[热词推荐：" + topN.get("keyWordTop1")
                + "、" + topN.get("keyWordTop2")
                + "、" + topN.get("keyWordTop3") + "]";
        String id = new RandomIdGenerator().generateId();
        // Single instant so create_time and update_time are identical on insert.
        Date now = new Date();
        pgJdbcTemplate.update(
                "insert into base_doc(id,doc_id,create_time,update_time,doc_type,doc_name) values(?,?,?,?,?,?)",
                id, vectorDocId, now, now, file.getContentType(), docName);
        return vectorDocId;
    }

    /**
     * Runs a RAG similarity search over the document vector store.
     *
     * @param conversationId id of the conversation the query belongs to
     * @param ragType        retrieval strategy selector, interpreted by the engine
     * @param content        user query text
     * @param tableName      backing table to search against
     * @return a success {@link RestResult} wrapping the engine's search hits
     */
    @Override
    public RestResult search(String conversationId, String ragType, String content, String tableName) {
        VectorStoreEngine storeEngine = HawkeyeVectorStoreEngine.builder().vectorStore(docVectorStore).build();
        RagEngine ragEngine = HawkeyeRagEngine.builder().vectorStoreEngine(storeEngine).build();
        return RestResult.buildSuccessResult(ragEngine.similaritySearch(conversationId, ragType, content, tableName));
    }

    /**
     * Splits the extracted documents into token chunks, enriches each chunk's
     * metadata (source name, content type, per-chunk hot words) and writes the
     * chunks into the vector store.
     *
     * @param file      the original upload, used only for name/content-type metadata
     * @param documents non-empty list of documents already extracted by Tika
     * @return the id of the first pre-split document, used as the logical doc id
     */
    private String writeDocumentByTika(MultipartFile file, List<Document> documents) {
        String name = StringUtils.substringBefore(file.getOriginalFilename(), ".");
        String contentType = file.getContentType();
        List<Document> chunks = tokenTextSplitter.apply(documents);
        for (Document chunk : chunks) {
            // getMetadata() returns the chunk's live map, so mutating it is enough;
            // the previous version rebuilt an identical Document via the builder.
            Map<String, Object> metadata = chunk.getMetadata();
            TopN topN = chatApiService.topN(TOP_N_PROMPT, chunk.getText());
            metadata.put("id", chunk.getId());
            metadata.put("name", name);
            metadata.put("contentType", contentType);
            metadata.put("keyWordTop1", topN.keyWordTop1());
            metadata.put("keyWordTop2", topN.keyWordTop2());
            metadata.put("keyWordTop3", topN.keyWordTop3());
        }
        docVectorStore.accept(chunks);
        return documents.get(0).getId();
    }

    /**
     * Extracts the top-3 hot words of the last extracted document as an
     * insertion-ordered map keyed {@code keyWordTop1..3}.
     *
     * <p>The previous version invoked the chat model once per document and
     * discarded every result except the last — only one call is needed.
     *
     * @param documents non-empty list of documents extracted by Tika
     * @return ordered map of the three hot words of the last document
     */
    private Map<String, String> topN2(List<Document> documents) {
        Document last = documents.get(documents.size() - 1);
        TopN topN = chatApiService.topN(TOP_N_PROMPT, last.getText());
        Map<String, String> topMap = Maps.newLinkedHashMap();
        topMap.put("keyWordTop1", topN.keyWordTop1());
        topMap.put("keyWordTop2", topN.keyWordTop2());
        topMap.put("keyWordTop3", topN.keyWordTop3());
        return topMap;
    }
}
