package com.sys.ai.service.impl;

import com.baomidou.mybatisplus.extension.conditions.update.LambdaUpdateChainWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.sys.ai.domain.AiEmbeddingDb;
import com.sys.ai.domain.AiKnowledge;
import com.sys.ai.domain.AiKnowledgeFile;
import com.sys.ai.domain.AiKnowledgeFragmentation;
import com.sys.ai.enums.SegmentTypeEnums;
import com.sys.ai.mapper.AiKnowledgeFileMapper;
import com.sys.ai.mapper.AiKnowledgeFragmentationMapper;
import com.sys.ai.service.*;
import com.sys.ai.vo.EmbeddingsVO;
import com.sys.common.exception.ServiceException;
import com.sys.common.utils.EnumUtil;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.UrlDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.parser.apache.poi.ApachePoiDocumentParser;
import dev.langchain4j.data.document.splitter.*;
import dev.langchain4j.data.segment.TextSegment;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Objects;
import java.util.concurrent.ThreadPoolExecutor;

/**
 * Service implementation for knowledge documents: CRUD operations plus
 * asynchronous splitting, embedding and vector storage of uploaded files.
 *
 * <p>Processing status values stored on {@code AiKnowledgeFile.status}:
 * 0 = processing, 1 = processed successfully, -1 = failed.</p>
 *
 * @author LGY
 * @date 2025-05-26
 */
@Slf4j
@Service
public class AiKnowledgeFileServiceImpl extends ServiceImpl<AiKnowledgeFileMapper, AiKnowledgeFile> implements IAiKnowledgeFileService {

    /** File extensions routed to the Apache POI parser (Office formats). */
    private static final List<String> POI_FILE_TYPES = List.of("doc", "docx", "xls", "xlsx", "ppt", "pptx");

    @Autowired
    private AiKnowledgeFileMapper aiKnowledgeFileMapper;

    @Resource(name = "myExecutor")
    private ThreadPoolExecutor executor;
    @Autowired
    private AiKnowledgeFragmentationMapper knowledgeFragmentationMapper;
    @Autowired
    private AiService aiService;
    @Autowired
    private IDbOptionService dbOptionService;
    @Autowired
    private IAiKnowledgeService knowledgeService;
    @Autowired
    private IAiEmbeddingDbService embeddingDbService;

    /**
     * Looks up a single knowledge document by primary key.
     *
     * @param id knowledge document primary key
     * @return the document, or {@code null} if it does not exist
     */
    @Override
    public AiKnowledgeFile selectAiKnowledgeFileById(String id) {
        return this.getById(id);
    }

    /**
     * Queries knowledge documents matching the given filter object.
     *
     * @param aiKnowledgeFile filter criteria (handled by the mapper XML)
     * @return matching knowledge documents
     */
    @Override
    public List<AiKnowledgeFile> selectAiKnowledgeFileList(AiKnowledgeFile aiKnowledgeFile) {
        return aiKnowledgeFileMapper.selectAiKnowledgeFileList(aiKnowledgeFile);
    }

    /**
     * Persists a new knowledge document and kicks off asynchronous
     * splitting / vectorization of its content.
     *
     * @param aiKnowledgeFile document to insert; {@code filePath} must be set
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public void insertAiKnowledgeFile(AiKnowledgeFile aiKnowledgeFile) {
        // Extract the extension defensively: lastIndexOf('.') returns -1 when the
        // path contains no dot, and the original substring(-1 + 1) silently stored
        // the whole path as the "type". Normalize to lower case so "PDF"/"Docx"
        // uploads are still recognized by the parser dispatch.
        String filePath = aiKnowledgeFile.getFilePath();
        int dotIndex = filePath.lastIndexOf('.');
        String fileType = dotIndex >= 0 ? filePath.substring(dotIndex + 1).toLowerCase(Locale.ROOT) : "";
        aiKnowledgeFile.setStatus(0) // 0 = processing
                .setFileType(fileType);
        this.save(aiKnowledgeFile);
        // Split and vectorize the uploaded document on a background thread.
        // NOTE(review): the task starts before this transaction commits; it only
        // reads the in-memory entity plus knowledge/embedding rows by id, but
        // confirm those referenced rows are committed before files are uploaded.
        this.dealWithFile(aiKnowledgeFile);
    }

    /**
     * Splits the file content into fragments, embeds each fragment, and stores
     * the vectors. Runs asynchronously on the injected "myExecutor" pool.
     *
     * <p>The former {@code @Async("myExecutor")} annotation was removed: it never
     * took effect because this method is self-invoked (bypassing the Spring
     * proxy), and the body already submits the work to the same executor
     * explicitly — keeping both would queue the task twice for external callers.</p>
     *
     * <p>On any failure the file row is marked failed (status -1) with the
     * exception message; on success it is marked done (status 1).</p>
     *
     * @param aiKnowledgeFile persisted document whose content should be processed
     */
    public void dealWithFile(AiKnowledgeFile aiKnowledgeFile) {
        executor.execute(() -> {
            try {
                AiKnowledge aiKnowledge = knowledgeService.getById(aiKnowledgeFile.getKnowledgeId());
                // Fail fast with a clear message instead of an NPE on the next line.
                Assert.notNull(aiKnowledge, "知识库不存在");
                AiEmbeddingDb embeddingDb = embeddingDbService.getById(aiKnowledge.getEmbeddingDbId());
                Assert.notNull(embeddingDb, "向量数据库不存在");
                // 1. Load and parse the document, 2. build the configured splitter.
                Document document = parseDocument(aiKnowledgeFile);
                DocumentSplitter splitter = buildSplitter(aiKnowledge);
                // 3. Split the document into text segments.
                List<TextSegment> segments = splitter.split(document);
                if (!CollectionUtils.isEmpty(segments)) {
                    // 4. Persist each fragment and embed it in a single pass.
                    List<AiKnowledgeFragmentation> fragmentations = new ArrayList<>(segments.size());
                    for (TextSegment segment : segments) {
                        AiKnowledgeFragmentation fragmentation = new AiKnowledgeFragmentation();
                        fragmentation.setAiKnowledgeFile(aiKnowledgeFile.getId())
                                .setKnowledgeId(aiKnowledgeFile.getKnowledgeId())
                                .setFragmentation(segment.text())
                                .setCreateBy(aiKnowledgeFile.getCreateBy());
                        knowledgeFragmentationMapper.insert(fragmentation);
                        EmbeddingsVO embeddingsVO = aiService.generateQAToEmbeddings(
                                fragmentation.getFragmentation(), aiKnowledge.getRankModelId());
                        fragmentation.setFragmentationEmbedding(embeddingsVO.getData().get(0).getEmbedding());
                        fragmentations.add(fragmentation);
                    }
                    // 5. Store the vectors in the configured vector database.
                    dbOptionService.saveAiKnowledge(fragmentations, aiKnowledge.getVectorDb(), embeddingDb);
                    aiKnowledgeFile.setStatus(1); // 1 = processed successfully
                    aiKnowledgeFileMapper.updateById(aiKnowledgeFile);
                }
                // NOTE(review): when the splitter yields no segments the status
                // stays 0 ("processing") forever — confirm whether an empty
                // document should instead be marked complete.
            } catch (Exception e) {
                // Pass the throwable as the last argument so the stack trace is
                // logged (the original logged only e.getMessage()).
                log.error("解析文件失败:{}", e.getMessage(), e);
                aiKnowledgeFile.setStatus(-1) // -1 = failed
                        .setExceptionMessage(e.getMessage());
                aiKnowledgeFileMapper.updateById(aiKnowledgeFile);
            }
        });
    }

    /**
     * Loads the remote file and parses it into a langchain4j {@link Document},
     * choosing the parser by the stored file extension.
     *
     * @param aiKnowledgeFile document whose {@code filePath}/{@code fileType} are read
     * @return the parsed document
     * @throws ServiceException when the extension is not supported
     */
    private Document parseDocument(AiKnowledgeFile aiKnowledgeFile) {
        String fileType = aiKnowledgeFile.getFileType();
        String filePath = aiKnowledgeFile.getFilePath();
        if (Objects.equals(fileType, "pdf")) {
            return UrlDocumentLoader.load(filePath, new ApachePdfBoxDocumentParser());
        }
        if (POI_FILE_TYPES.contains(fileType)) {
            return UrlDocumentLoader.load(filePath, new ApachePoiDocumentParser());
        }
        if (Objects.equals(fileType, "txt")) {
            return UrlDocumentLoader.load(filePath, new TextDocumentParser());
        }
        throw new ServiceException("不支持的文档类型");
    }

    /**
     * Builds the document splitter configured on the knowledge base
     * (segment rule, max segment size, max overlap).
     *
     * @param aiKnowledge knowledge base holding the split configuration
     * @return the configured splitter
     * @throws ServiceException when the segment rule is unknown
     */
    private DocumentSplitter buildSplitter(AiKnowledge aiKnowledge) {
        SegmentTypeEnums segmentType = EnumUtil.getEnumByCode(aiKnowledge.getSegmentType(), SegmentTypeEnums.class);
        Assert.notNull(segmentType, "分片规则器不存在");
        return switch (segmentType) {
            case sentence -> new DocumentBySentenceSplitter(aiKnowledge.getMaxSegment(), aiKnowledge.getMaxOverlap());
            case character -> new DocumentByCharacterSplitter(aiKnowledge.getMaxSegment(), aiKnowledge.getMaxOverlap());
            case line -> new DocumentByLineSplitter(aiKnowledge.getMaxSegment(), aiKnowledge.getMaxOverlap());
            case paragraph -> new DocumentByParagraphSplitter(aiKnowledge.getMaxSegment(), aiKnowledge.getMaxOverlap());
            case word -> new DocumentByWordSplitter(aiKnowledge.getMaxSegment(), aiKnowledge.getMaxOverlap());
            default -> throw new ServiceException("分片规则器不存在");
        };
    }

    /**
     * Updates a knowledge document by primary key.
     *
     * @param aiKnowledgeFile document carrying the id and the fields to update
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public void updateAiKnowledgeFile(AiKnowledgeFile aiKnowledgeFile) {
        this.updateById(aiKnowledgeFile);
    }

    /**
     * Deletes knowledge documents in batch, together with their fragment rows.
     *
     * @param ids primary keys of the documents to delete
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public void deleteAiKnowledgeFileByIds(String[] ids) {
        // Guard against an empty array: an IN () clause is invalid SQL.
        if (ids == null || ids.length == 0) {
            return;
        }
        baseMapper.deleteBatchIds(Arrays.asList(ids));
        // Also remove the fragmentation rows derived from these files.
        knowledgeFragmentationMapper.deleteBatchByFileIds(List.of(ids));
    }

    /**
     * Deletes all knowledge documents belonging to the given knowledge bases.
     *
     * @param knowledgeIds knowledge base ids whose documents should be removed
     */
    @Override
    @Transactional(rollbackFor = RuntimeException.class)
    public void deleteBatch(List<String> knowledgeIds) {
        // Guard against an empty list: an IN () clause is invalid SQL.
        if (CollectionUtils.isEmpty(knowledgeIds)) {
            return;
        }
        new LambdaUpdateChainWrapper<>(baseMapper)
                .in(AiKnowledgeFile::getKnowledgeId, knowledgeIds)
                .remove();
    }
}