package com.fy.fyai.service.impl;

import com.fy.fyai.common.result.R;
import com.fy.fyai.model.entity.RagConfig;
import com.fy.fyai.service.RagConfigService;
import com.fy.fyai.service.RagService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.document.Document;
import org.springframework.ai.reader.tika.TikaDocumentReader;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.ai.vectorstore.filter.FilterExpressionBuilder;
import org.springframework.ai.vectorstore.pgvector.PgVectorStore;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static com.fy.fyai.common.result.R.ok;

/**
 * RAG service implementation: ingests knowledge-base files into the pgvector
 * store and retrieves the most relevant passages for a given tag.
 *
 * @author fwq
 * @date 2025-10-14 22:39
 */
@Slf4j
@Service
public class RagServiceImpl implements RagService {

    /**
     * Metadata key used to tag document chunks. Must be all lower-case
     * ("ragtag", not "ragTag") — the pgvector filter expression in
     * {@link #getTags} will not match otherwise.
     */
    private static final String RAG_TAG_KEY = "ragtag";

    @Resource
    private TokenTextSplitter tokenTextSplitter;

    @Resource
    private PgVectorStore pgVectorStore;

    @Resource
    private RagConfigService ragConfigService;

    /**
     * Uploads knowledge-base files: parses each file with Tika, splits it into
     * token-sized chunks, tags and stores the chunks in the vector store, and
     * registers a {@link RagConfig} row for the tag if none exists yet.
     *
     * @param ragTag knowledge-base tag attached to every chunk's metadata
     * @param files  knowledge-base files to ingest
     * @return per-file parsed chunk texts, wrapped in {@link R}
     */
    @Override
    @Transactional
    public R uploadFile(String ragTag, List<MultipartFile> files) {
        log.info("上传知识库开始 {}", ragTag);
        Map<String, Map<String, String>> resDataMap = new HashMap<>();
        for (MultipartFile file : files) {
            TikaDocumentReader documentReader = new TikaDocumentReader(file.getResource());
            List<Document> documents = documentReader.get();

            documents.forEach(doc -> doc.getMetadata().put(RAG_TAG_KEY, ragTag));
            List<Document> documentSplitterList = tokenTextSplitter.apply(documents);
            // Re-apply the tag after splitting in case the splitter did not
            // propagate metadata to every chunk.
            documentSplitterList.forEach(doc -> doc.getMetadata().put(RAG_TAG_KEY, ragTag));

            // LinkedHashMap so the response preserves chunk order.
            Map<String, String> fileResDataMap = new LinkedHashMap<>();
            for (int i = 0; i < documentSplitterList.size(); i++) {
                fileResDataMap.put("解析内容" + (i + 1), documentSplitterList.get(i).getText());
            }
            resDataMap.put("文件--" + file.getOriginalFilename() + "的解析", fileResDataMap);

            // Use the public add(...) API rather than the internal doAdd(...)
            // template method so the store's observation instrumentation runs.
            pgVectorStore.add(documentSplitterList);

            saveConfigIfAbsent(ragTag, file.getOriginalFilename());
        }
        return ok(resDataMap);
    }

    /**
     * Creates and saves a {@link RagConfig} row for the given tag unless one
     * already exists.
     *
     * @param ragTag        knowledge-base tag to register
     * @param knowledgeName display name of the knowledge source (file name)
     */
    private void saveConfigIfAbsent(String ragTag, String knowledgeName) {
        RagConfig existingConfig = ragConfigService.lambdaQuery()
                .eq(RagConfig::getRagTag, ragTag)
                .one();
        if (existingConfig != null) {
            return;
        }
        LocalDateTime now = LocalDateTime.now();
        RagConfig ragConfig = RagConfig.builder()
                .ragTag(ragTag)
                .enabled(1)  // enabled
                .deleted(0)  // not logically deleted
                .createTime(now)
                .updateTime(now)
                .description("知识库标签:" + ragTag + "的描述")
                .creator("fwq")
                .knowledgeName(knowledgeName)
                .build();
        ragConfigService.save(ragConfig);
    }

    /**
     * Searches the vector store for passages matching {@code msg} within the
     * knowledge base identified by {@code ragTag} and returns the top two.
     *
     * @param msg    user query
     * @param ragTag knowledge-base tag to filter on
     * @return insertion-ordered map of the top matching passage texts
     */
    @Override
    public Map getTags(String msg, String ragTag) {
        // Build the filter via FilterExpressionBuilder instead of string
        // concatenation so a ragTag containing quotes cannot break or inject
        // into the filter expression.
        SearchRequest request = SearchRequest.builder()
                .query(msg)
                .topK(5)
                .filterExpression(new FilterExpressionBuilder().eq(RAG_TAG_KEY, ragTag).build())
                .build();

        List<Document> documents = pgVectorStore.similaritySearch(request);

        // Keep only the two most similar results.
        List<Document> top2Results = documents.stream()
                .limit(2)
                .collect(Collectors.toList());

        Map<String, String> resMap = new LinkedHashMap<>();
        for (int i = 0; i < top2Results.size(); i++) {
            resMap.put("内容" + (i + 1), top2Results.get(i).getText());
        }
        return resMap;
    }
}
