package org.ruoyi.knowledge.service.impl;

import cn.hutool.core.util.RandomUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import lombok.RequiredArgsConstructor;
import org.ruoyi.common.core.domain.model.LoginUser;
import org.ruoyi.common.core.utils.MapstructUtils;
import org.ruoyi.common.core.utils.StringUtils;
import org.ruoyi.common.mybatis.core.page.PageQuery;
import org.ruoyi.common.mybatis.core.page.TableDataInfo;
import org.ruoyi.common.satoken.utils.LoginHelper;
import org.ruoyi.knowledge.domain.KnowledgeAttach;
import org.ruoyi.knowledge.domain.KnowledgeInfo;
import org.ruoyi.knowledge.domain.bo.KnowledgeInfoBo;
import org.ruoyi.knowledge.domain.req.KnowledgeInfoUploadRequest;
import org.ruoyi.knowledge.domain.vo.KnowledgeInfoVo;
import org.ruoyi.knowledge.mapper.KnowledgeAttachMapper;
import org.ruoyi.knowledge.mapper.KnowledgeFragmentMapper;
import org.ruoyi.knowledge.mapper.KnowledgeInfoMapper;
import org.ruoyi.knowledge.service.EmbeddingService;
import org.ruoyi.knowledge.service.IKnowledgeInfoService;
import org.ruoyi.knowledge.service.RagflowService;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.util.*;

/**
 * 知识库Service业务层处理
 *
 * @author Lion Li
 * @date 2024-10-21
 */
@Service
@RequiredArgsConstructor
public class KnowledgeInfoServiceImpl implements IKnowledgeInfoService {

    /** Mapper for the knowledge base master table. */
    private final KnowledgeInfoMapper baseMapper;

    /** Local embedding service — appears unused in this class now that RAGFlow owns vectors; TODO confirm before removing. */
    private final EmbeddingService embeddingService;

    /** Mapper for local knowledge fragment metadata. */
    private final KnowledgeFragmentMapper fragmentMapper;

    /** Mapper for local knowledge attachment metadata. */
    private final KnowledgeAttachMapper attachMapper;

    /** Client for the external RAGFlow service (datasets/documents). */
    private final RagflowService ragflowService;

    /**
     * Look up a single knowledge base by primary key.
     *
     * @param id primary key of the knowledge base row
     * @return matching view object, or {@code null} when no row exists
     */
    @Override
    public KnowledgeInfoVo queryById(Long id){
        KnowledgeInfoVo vo = baseMapper.selectVoById(id);
        return vo;
    }

    /**
     * Query a page of knowledge bases matching the given filter.
     *
     * @param bo        filter conditions
     * @param pageQuery paging parameters
     * @return one page of knowledge base view objects
     */
    @Override
    public TableDataInfo<KnowledgeInfoVo> queryPageList(KnowledgeInfoBo bo, PageQuery pageQuery) {
        Page<KnowledgeInfoVo> page = baseMapper.selectVoPage(pageQuery.build(), buildQueryWrapper(bo));
        return TableDataInfo.build(page);
    }

    /**
     * Query all knowledge bases matching the given filter (no paging).
     *
     * @param bo filter conditions
     * @return matching knowledge base view objects
     */
    @Override
    public List<KnowledgeInfoVo> queryList(KnowledgeInfoBo bo) {
        return baseMapper.selectVoList(buildQueryWrapper(bo));
    }

    /**
     * Build the shared query wrapper for knowledge base lookups.
     * <p>
     * NOTE(review): the trailing {@code or} clause produces
     * "(preceding filters) OR share = '1'", so publicly shared knowledge bases
     * are returned regardless of the kid/uid/name/description filters above —
     * confirm this "own + public" semantics is intended.
     */
    private LambdaQueryWrapper<KnowledgeInfo> buildQueryWrapper(KnowledgeInfoBo bo) {
        LambdaQueryWrapper<KnowledgeInfo> lqw = Wrappers.lambdaQuery();
        // Each condition is only applied when the corresponding field is present.
        lqw.eq(StringUtils.isNotBlank(bo.getKid()), KnowledgeInfo::getKid, bo.getKid());
        lqw.eq(bo.getUid() != null, KnowledgeInfo::getUid, bo.getUid());
        lqw.like(StringUtils.isNotBlank(bo.getKname()), KnowledgeInfo::getKname, bo.getKname());
        lqw.eq(StringUtils.isNotBlank(bo.getDescription()), KnowledgeInfo::getDescription, bo.getDescription());
        // Also include publicly shared knowledge bases ("1" = shared).
        lqw.or(wrapper -> wrapper.eq(KnowledgeInfo::getShare, "1"));
        return lqw;
    }

    /**
     * Update an existing knowledge base from the given business object.
     *
     * @param bo knowledge base data carrying the id to update
     * @return {@code true} when exactly one row (or more) was updated
     */
    @Override
    public Boolean updateByBo(KnowledgeInfoBo bo) {
        KnowledgeInfo entity = MapstructUtils.convert(bo, KnowledgeInfo.class);
        validEntityBeforeSave(entity);
        int rows = baseMapper.updateById(entity);
        return rows > 0;
    }

    /**
     * Hook for validating an entity before it is persisted.
     * Currently a no-op.
     *
     * @param entity entity about to be saved
     */
    private void validEntityBeforeSave(KnowledgeInfo entity){
        // TODO: add data validation here, e.g. uniqueness constraints.
    }

    /**
     * Create a new knowledge base (when no kid is supplied) or update an
     * existing one. Creation also provisions a RAGFlow dataset with server
     * defaults; RAGFlow failures are logged but never block local creation.
     *
     * @param bo incoming knowledge base data
     * @throws IllegalArgumentException when {@code bo} cannot be converted to an entity
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void saveOne(KnowledgeInfoBo bo) {
        KnowledgeInfo knowledgeInfo = MapstructUtils.convert(bo, KnowledgeInfo.class);
        // Fail fast instead of handing a null entity to the mapper, which would
        // otherwise surface as an opaque NPE inside MyBatis-Plus (the original
        // code guarded setKid() but not insert()/updateById()).
        if (knowledgeInfo == null) {
            throw new IllegalArgumentException("knowledgeInfo must not be null");
        }
        if (StringUtils.isBlank(bo.getKid())){
            String kid = RandomUtil.randomString(10);
            knowledgeInfo.setKid(kid);
            knowledgeInfo.setUid(LoginHelper.getLoginUser().getUserId());
            baseMapper.insert(knowledgeInfo);
            // Local vector-store schema creation is no longer needed: RAGFlow owns the vectors.

            // On creation we only provision an empty dataset with default settings;
            // chunking parameters (block size, delimiter, ...) are applied later,
            // when attachments are uploaded.
            try {
                System.out.println("========== 创建知识库 ==========");
                System.out.println("知识库名称: " + bo.getKname());
                System.out.println("是否公开: " + bo.getShare());
                System.out.println("====================================");

                // "naive" chunking; embedding model left to the RAGFlow server default.
                String chunkMethod = "naive";
                String permission = getPermission(bo.getShare());

                // embedding_model and parser_config are passed as null so the
                // RAGFlow server defaults apply.
                String datasetId = ragflowService.createDataset(bo.getKname(), null, chunkMethod, null, permission);

                if (datasetId != null && !datasetId.isEmpty()) {
                    knowledgeInfo.setRagflowDatasetId(datasetId);
                    baseMapper.updateById(knowledgeInfo);
                    System.out.println("✓ 创建Ragflow数据集成功，datasetId: " + datasetId);
                } else {
                    System.out.println("✗ 创建Ragflow数据集失败");
                }
            } catch (Exception e) {
                System.err.println("创建Ragflow数据集失败: " + e.getMessage());
                e.printStackTrace();
                // Dataset creation is best-effort; the local record is kept regardless.
            }

            System.out.println("知识库创建成功，kid: " + kid);
        }else {
            baseMapper.updateById(knowledgeInfo);
        }
    }

    /**
     * Handle an attachment upload: apply any processing parameters supplied
     * with the request, then store the file.
     *
     * @param request upload request (file, kid, optional chunking settings)
     */
    @Override
    public void upload(KnowledgeInfoUploadRequest request) {
        // Only touch the knowledge base configuration when the request actually
        // carries processing parameters.
        boolean hasConfig = request.getTextBlockSize() != null
            || request.getVectorModel() != null
            || StringUtils.isNotBlank(request.getChunkingMethod());
        if (hasConfig) {
            updateKnowledgeConfig(request);
        }
        storeContent(request.getFile(), request.getKid(), request.getChunkingMethod());
    }
    
    /**
     * Update the knowledge base configuration from parameters supplied with an
     * upload, persist it locally, and propagate the parser configuration to the
     * linked RAGFlow dataset (when one exists). All failures are logged and
     * swallowed so the upload itself can proceed.
     *
     * @param request upload request possibly carrying chunking/model settings
     */
    private void updateKnowledgeConfig(KnowledgeInfoUploadRequest request) {
        try {
            System.out.println("========== 开始更新知识库配置 ==========");
            System.out.println("接收到的参数:");
            System.out.println("  kid: " + request.getKid());
            System.out.println("  separator: " + request.getSeparator());
            System.out.println("  textBlockSize: " + request.getTextBlockSize());
            System.out.println("  overlapChar: " + request.getOverlapChar());
            System.out.println("  retrieveLimit: " + request.getRetrieveLimit());
            System.out.println("  questionSeparator: " + request.getQuestionSeparator());
            System.out.println("  vectorModel: " + request.getVectorModel());
            System.out.println("  chunkingMethod: " + request.getChunkingMethod());
            
            // Load the knowledge base record by kid.
            LambdaQueryWrapper<KnowledgeInfo> wrapper = Wrappers.lambdaQuery();
            wrapper.eq(KnowledgeInfo::getKid, request.getKid());
            KnowledgeInfo knowledgeInfo = baseMapper.selectOne(wrapper);
            
            if (knowledgeInfo == null) {
                System.err.println("知识库不存在，kid: " + request.getKid());
                return;
            }
            
            System.out.println("找到知识库: " + knowledgeInfo.getKname());
            System.out.println("RAGFlow数据集ID: " + knowledgeInfo.getRagflowDatasetId());
            
            // Copy over only the parameters that were actually supplied.
            if (request.getTextBlockSize() != null) {
                knowledgeInfo.setTextBlockSize(request.getTextBlockSize());
                System.out.println("更新文本块大小: " + request.getTextBlockSize());
            }
            if (request.getVectorModel() != null) {
                knowledgeInfo.setVectorModel(request.getVectorModel());
                System.out.println("更新向量模型: " + request.getVectorModel());
            }
            if (StringUtils.isNotBlank(request.getSeparator())) {
                knowledgeInfo.setKnowledgeSeparator(request.getSeparator());
                System.out.println("更新分隔符: " + request.getSeparator());
            }
            if (request.getOverlapChar() != null) {
                knowledgeInfo.setOverlapChar(request.getOverlapChar());
                System.out.println("更新重叠字符: " + request.getOverlapChar());
            }
            if (request.getRetrieveLimit() != null) {
                knowledgeInfo.setRetrieveLimit(request.getRetrieveLimit());
                System.out.println("更新检索条数: " + request.getRetrieveLimit());
            }
            if (StringUtils.isNotBlank(request.getQuestionSeparator())) {
                knowledgeInfo.setQuestionSeparator(request.getQuestionSeparator());
                System.out.println("更新提问分隔符: " + request.getQuestionSeparator());
            }
            if (StringUtils.isNotBlank(request.getChunkingMethod())) {
                knowledgeInfo.setChunkingMethod(request.getChunkingMethod());
                System.out.println("更新切片方法: " + request.getChunkingMethod());
            }
            
            // Persist the updated configuration locally.
            baseMapper.updateById(knowledgeInfo);
            System.out.println("✓ 知识库配置已保存到数据库");
            
            // Push parser_config to RAGFlow when a dataset is linked.
            if (StringUtils.isNotBlank(knowledgeInfo.getRagflowDatasetId())) {
                System.out.println("准备更新RAGFlow数据集配置...");
                updateRagflowDatasetConfig(knowledgeInfo);
            } else {
                System.err.println("⚠ RAGFlow数据集ID为空，跳过更新");
            }
            
            System.out.println("========== 知识库配置更新完成 ==========");
        } catch (Exception e) {
            System.err.println("❌ 更新知识库配置失败: " + e.getMessage());
            e.printStackTrace();
        }
    }
    
    /**
     * Push the parser configuration derived from a local knowledge base record
     * to its linked RAGFlow dataset. Errors are logged and swallowed.
     *
     * @param knowledgeInfo record whose RAGFlow dataset should be reconfigured
     */
    private void updateRagflowDatasetConfig(KnowledgeInfo knowledgeInfo) {
        try {
            String datasetId = knowledgeInfo.getRagflowDatasetId();
            Map<String, Object> parserConfig = buildParserConfigFromKnowledgeInfo(knowledgeInfo);

            System.out.println("构建的RAGFlow parser_config: " + parserConfig);
            System.out.println("数据集ID: " + datasetId);

            // Nothing to send: no configured values were found on the record.
            if (parserConfig.isEmpty()) {
                System.err.println("⚠ parser_config为空，没有可更新的配置");
                return;
            }
            // No dataset id: silently skip, matching the caller's expectations.
            if (datasetId == null) {
                return;
            }

            Map<String, Object> updateData = new HashMap<>();
            updateData.put("parser_config", parserConfig);

            System.out.println("发送更新请求，数据: " + updateData);
            if (ragflowService.updateDataset(datasetId, updateData)) {
                System.out.println("✓✓✓ RAGFlow数据集配置已更新: " + parserConfig);
            } else {
                System.err.println("✗✗✗ 更新RAGFlow数据集配置失败");
            }
        } catch (Exception e) {
            System.err.println("❌ 更新RAGFlow数据集配置异常: " + e.getMessage());
            e.printStackTrace();
        }
    }
    
    /**
     * Build a RAGFlow {@code parser_config} map from the fields stored on a
     * knowledge base record. Only explicitly configured values are included so
     * RAGFlow server defaults apply for everything else.
     *
     * @param knowledgeInfo source record
     * @return parser configuration (possibly empty)
     */
    private Map<String, Object> buildParserConfigFromKnowledgeInfo(KnowledgeInfo knowledgeInfo) {
        Map<String, Object> config = new HashMap<>();

        // Text block size maps onto RAGFlow's chunk_token_num.
        if (knowledgeInfo.getTextBlockSize() != null && knowledgeInfo.getTextBlockSize() > 0) {
            config.put("chunk_token_num", knowledgeInfo.getTextBlockSize());
        }

        // The stored separator uses the escaped form "\n"; convert to a real newline.
        String separator = knowledgeInfo.getKnowledgeSeparator();
        if (StringUtils.isNotBlank(separator)) {
            config.put("delimiter", separator.replace("\\n", "\n"));
        }

        // chunk_method is intentionally NOT included: the RAGFlow update API does
        // not accept it; the chunking method is fixed at dataset creation time.
        return config;
    }

    /**
     * Persist attachment metadata locally and push the file to RAGFlow for
     * parsing. Duplicate file names within the same knowledge base are skipped;
     * RAGFlow failures are logged without rolling back the local metadata.
     *
     * @param file           uploaded file
     * @param kid            knowledge base id
     * @param chunkingMethod requested chunking method (currently unused here;
     *                       applied via dataset configuration — TODO confirm)
     */
    public void storeContent(MultipartFile file, String kid, String chunkingMethod) {
        String fileName = file.getOriginalFilename();
        // Guard against a missing client-supplied name: the extension lookup
        // below would otherwise throw an NPE.
        if (StringUtils.isBlank(fileName)) {
            System.err.println("文件名为空，无法上传");
            return;
        }

        // Skip files already uploaded to this knowledge base (checked before
        // building the entity — no point constructing it for a duplicate).
        LambdaQueryWrapper<KnowledgeAttach> queryWrapper = Wrappers.lambdaQuery();
        queryWrapper.eq(KnowledgeAttach::getKid, kid)
                   .eq(KnowledgeAttach::getDocName, fileName);
        long count = attachMapper.selectCount(queryWrapper);
        if (count > 0) {
            System.err.println("文件已存在，跳过上传: " + fileName);
            return;
        }

        KnowledgeAttach knowledgeAttach = new KnowledgeAttach();
        knowledgeAttach.setKid(kid);
        String docId = RandomUtil.randomString(10);
        knowledgeAttach.setDocId(docId);
        knowledgeAttach.setDocName(fileName);
        // Files without an extension get an empty docType instead of the whole
        // name (lastIndexOf('.') == -1 previously yielded the full filename).
        int dotIndex = fileName.lastIndexOf('.');
        knowledgeAttach.setDocType(dotIndex >= 0 ? fileName.substring(dotIndex + 1) : "");

        // Save attachment metadata first; the content itself lives in RAGFlow.
        knowledgeAttach.setContent("");
        knowledgeAttach.setCreateTime(new Date());
        knowledgeAttach.setEnable(1); // enabled by default
        attachMapper.insert(knowledgeAttach);
        System.out.println("附件元数据已保存: " + fileName);

        // Upload to RAGFlow and trigger parsing.
        System.out.println("开始上传文件到Ragflow: " + fileName);
        try {
            List<KnowledgeInfoVo> knowledgeInfoList = baseMapper.selectVoByMap(Map.of("kid", kid));
            if (knowledgeInfoList == null || knowledgeInfoList.isEmpty()) {
                System.err.println("知识库不存在: " + kid);
                return;
            }

            String datasetId = knowledgeInfoList.get(0).getRagflowDatasetId();
            if (datasetId == null || datasetId.isEmpty()) {
                System.err.println("数据集ID为空，无法上传");
                return;
            }

            List<String> documentIds = ragflowService.uploadDocuments(datasetId, List.of(file));
            if (!documentIds.isEmpty()) {
                String ragflowDocumentId = documentIds.get(0);
                System.out.println("✓ 获取RAGFlow document_id: " + ragflowDocumentId);

                // TODO: persist ragflowDocumentId once the attachment table has a column for it.

                boolean parseSuccess = ragflowService.parseDocuments(datasetId, documentIds);
                if (parseSuccess) {
                    System.out.println("✓ 文件已成功上传并解析到Ragflow");
                }
            } else {
                System.err.println("✗ 文件上传到Ragflow失败");
            }
        } catch (Exception e) {
            System.err.println("Ragflow上传异常: " + e.getMessage());
            e.printStackTrace();
        }
    }

    /**
     * Delete a knowledge base: best-effort removal of the linked RAGFlow
     * dataset first, then the local fragments, attachments and master record.
     * RAGFlow failures never block the local deletion.
     *
     * @param id knowledge base id (kid)
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void removeKnowledge(String id) {
        Map<String,Object> map = new HashMap<>();
        map.put("kid",id);
        List<KnowledgeInfoVo> knowledgeInfoList = baseMapper.selectVoByMap(map);
        check(knowledgeInfoList);

        // Best-effort RAGFlow cleanup; every failure is logged and swallowed.
        try {
            for (KnowledgeInfoVo vo : knowledgeInfoList) {
                try {
                    // Prefer the stored dataset id; fall back to a lookup by name.
                    String datasetId = vo.getRagflowDatasetId();
                    if (datasetId == null || datasetId.isEmpty()) {
                        System.out.println("本地未保存RAGFlow数据集ID，尝试通过名称查找: " + vo.getKname());
                        datasetId = ragflowService.getDatasetIdByName(vo.getKname());
                    }
                    if (datasetId == null || datasetId.isEmpty()) {
                        System.out.println("ℹ 未找到RAGFlow数据集ID，跳过RAGFlow删除: " + vo.getKname());
                        continue;
                    }
                    try {
                        if (ragflowService.deleteDataset(datasetId)) {
                            System.out.println("✓ 成功删除RAGFlow数据集: " + vo.getKname() + " (ID: " + datasetId + ")");
                        } else {
                            System.out.println("✗ 删除RAGFlow数据集失败: " + vo.getKname() + " (ID: " + datasetId + ")");
                        }
                    } catch (Exception ex) {
                        System.out.println("⚠ RAGFlow数据集删除异常: " + vo.getKname() + " - " + ex.getMessage());
                        // keep going
                    }
                } catch (Exception e) {
                    System.out.println("⚠ RAGFlow删除过程中出现异常: " + e.getMessage());
                    // keep going
                }
            }
        } catch (Exception e) {
            System.out.println("⚠ RAGFlow删除过程异常: " + e.getMessage());
            // proceed with the local deletion regardless
        }

        // Remove local metadata: fragments and attachments first, then the master record.
        fragmentMapper.deleteByMap(map);
        attachMapper.deleteByMap(map);
        baseMapper.deleteByMap(map);

        System.out.println("知识库删除完成，kid: " + id);
    }

    /**
     * Verify the current login user owns every knowledge base in the list.
     *
     * @param knowledgeInfoList records to check
     * @throws SecurityException when any record belongs to a different user
     */
    @Override
    public void check(List<KnowledgeInfoVo> knowledgeInfoList){
        LoginUser loginUser = LoginHelper.getLoginUser();
        boolean denied = knowledgeInfoList.stream()
            .anyMatch(vo -> !vo.getUid().equals(loginUser.getUserId()));
        if (denied) {
            throw new SecurityException("权限不足");
        }
    }


    /**
     * Upload a file into the RAGFlow dataset bound to the given knowledge base,
     * resolving (or creating) the dataset on demand, then trigger parsing.
     *
     * @param kid  knowledge base id
     * @param file file to upload
     * @return {@code true} when the file was uploaded and parsed successfully
     */
    public boolean uploadToRagflow(String kid, MultipartFile file) {
        try {
            List<KnowledgeInfoVo> knowledgeInfoList = baseMapper.selectVoByMap(Map.of("kid", kid));
            if (knowledgeInfoList == null || knowledgeInfoList.isEmpty()) {
                System.err.println("知识库不存在: " + kid);
                return false;
            }
            KnowledgeInfoVo knowledgeInfo = knowledgeInfoList.get(0);
            if (knowledgeInfo == null) {
                System.err.println("知识库信息为空: " + kid);
                return false;
            }

            // Resolve the dataset id, creating/looking it up when not stored yet.
            String datasetId = knowledgeInfo.getRagflowDatasetId();
            if (datasetId != null && !datasetId.isEmpty()) {
                System.out.println("使用已保存的Ragflow数据集ID: " + datasetId);
            } else {
                datasetId = getOrCreateRagflowDataset(kid, knowledgeInfo.getKname());
                if (datasetId == null) {
                    System.err.println("无法获取Ragflow数据集ID: " + kid);
                    return false;
                }
                // Remember the resolved id for future uploads.
                knowledgeInfo.setRagflowDatasetId(datasetId);
                baseMapper.updateById(MapstructUtils.convert(knowledgeInfo, KnowledgeInfo.class));
                System.out.println("保存Ragflow数据集ID到数据库: " + datasetId);
            }

            // Upload, then parse.
            List<String> documentIds = ragflowService.uploadDocuments(datasetId, List.of(file));
            if (documentIds.isEmpty()) {
                return false;
            }
            boolean parseSuccess = ragflowService.parseDocuments(datasetId, documentIds);
            if (parseSuccess) {
                System.out.println("文件已成功上传并解析到Ragflow数据集: " + datasetId);
            }
            return parseSuccess;
        } catch (Exception e) {
            System.err.println("上传文件到Ragflow失败: " + e.getMessage());
            e.printStackTrace();
            return false;
        }
    }
    
    /**
     * Resolve the RAGFlow dataset for a knowledge base: reuse a dataset with
     * the same name when one exists, otherwise create a fresh one.
     *
     * @param kid         knowledge base id (currently unused — TODO confirm)
     * @param datasetName dataset/knowledge-base name
     * @return dataset id, or {@code null} on failure
     */
    private String getOrCreateRagflowDataset(String kid, String datasetName) {
        try {
            // Reuse an existing dataset when possible.
            String datasetId = ragflowService.getDatasetIdByName(datasetName);
            if (datasetId != null) {
                System.out.println("找到已存在的Ragflow数据集: " + datasetId);
                return datasetId;
            }
            // Otherwise create a new one.
            datasetId = ragflowService.createDataset(datasetName);
            if (datasetId != null) {
                System.out.println("创建新的Ragflow数据集: " + datasetId);
            }
            return datasetId;
        } catch (Exception e) {
            System.err.println("获取或创建Ragflow数据集失败: " + e.getMessage());
            return null;
        }
    }

    /**
     * Probe the RAGFlow service.
     *
     * @return {@code true} when the service reports itself available;
     *         {@code false} on any error
     */
    public boolean checkRagflowStatus() {
        boolean available;
        try {
            available = ragflowService.isServiceAvailable();
        } catch (Exception e) {
            System.err.println("检查Ragflow服务状态失败: " + e.getMessage());
            available = false;
        }
        return available;
    }
    
    /**
     * Fetch every dataset known to RAGFlow.
     *
     * @return dataset descriptors, or an empty (mutable) list on error
     */
    public List<Map<String, Object>> getAllRagflowDatasets() {
        try {
            return ragflowService.getAllDatasets();
        } catch (Exception e) {
            System.err.println("获取Ragflow数据集列表失败: " + e.getMessage());
        }
        return new ArrayList<>();
    }

    /**
     * Sync a local knowledge base toward its RAGFlow dataset: resolves (or
     * creates) the dataset, then reports which local attachments are missing
     * on the RAGFlow side. No file content is actually re-uploaded here —
     * content lives in RAGFlow and only metadata is stored locally, so this
     * method can only detect and log the gap.
     *
     * @param kid knowledge base id
     * @return {@code true} when the comparison completed without error
     */
    public boolean syncKnowledgeToRagflow(String kid) {
        try {
            // 1. Load the knowledge base record.
            List<KnowledgeInfoVo> knowledgeInfoList = baseMapper.selectVoByMap(Map.of("kid", kid));
            if (knowledgeInfoList == null || knowledgeInfoList.isEmpty()) {
                System.err.println("知识库不存在: " + kid);
                return false;
            }
            
            KnowledgeInfoVo knowledgeInfo = knowledgeInfoList.get(0);
            String datasetId = knowledgeInfo.getRagflowDatasetId();
            
            if (datasetId == null || datasetId.isEmpty()) {
                // Resolve or create the dataset, then persist its id.
                datasetId = getOrCreateRagflowDataset(kid, knowledgeInfo.getKname());
                if (datasetId != null) {
                    knowledgeInfo.setRagflowDatasetId(datasetId);
                    baseMapper.updateById(MapstructUtils.convert(knowledgeInfo, KnowledgeInfo.class));
                } else {
                    System.err.println("无法获取Ragflow数据集ID");
                    return false;
                }
            }
            
            // 2. Load all local attachments for this knowledge base.
            LambdaQueryWrapper<KnowledgeAttach> attachWrapper = Wrappers.lambdaQuery();
            attachWrapper.eq(KnowledgeAttach::getKid, kid);
            List<KnowledgeAttach> localAttaches = attachMapper.selectList(attachWrapper);
            
            // 3. Collect the document names currently present in the RAGFlow dataset.
            List<Map<String, Object>> ragflowDocuments = ragflowService.getDocuments(datasetId);
            Set<String> ragflowDocNames = new HashSet<>();
            if (ragflowDocuments != null) {
                for (Map<String, Object> doc : ragflowDocuments) {
                    String docName = (String) doc.get("name");
                    if (docName != null) {
                        ragflowDocNames.add(docName);
                    }
                }
            }
            
            // 4. Report local attachments missing from RAGFlow.
            for (KnowledgeAttach attach : localAttaches) {
                if (!ragflowDocNames.contains(attach.getDocName())) {
                    System.out.println("检测到本地文件未同步到RAGFlow: " + attach.getDocName());
                    // Re-upload would require reading the file content from storage,
                    // which needs additional interface support; content is stored in
                    // RAGFlow and only metadata is kept locally.
                    System.out.println("文件 " + attach.getDocName() + " 已在RAGFlow中或需要重新上传");
                }
            }
            
            System.out.println("知识库同步完成: " + kid);
            return true;
        } catch (Exception e) {
            System.err.println("同步知识库到RAGFlow失败: " + e.getMessage());
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Sync RAGFlow datasets back into the local knowledge base table: for each
     * RAGFlow dataset, either back-fill the dataset id onto a same-named local
     * record, or create a new local record owned by the current login user.
     *
     * @return number of newly created local knowledge bases (back-fills are not counted)
     */
    public int syncRagflowDatasetsToLocal() {
        try {
            // 1. List every dataset known to RAGFlow.
            List<Map<String, Object>> ragflowDatasets = ragflowService.getAllDatasets();
            if (ragflowDatasets == null || ragflowDatasets.isEmpty()) {
                System.out.println("RAGFlow中没有任何数据集");
                return 0;
            }
            
            System.out.println("发现 " + ragflowDatasets.size() + " 个RAGFlow数据集，开始同步...");
            
            int syncedCount = 0;
            for (Map<String, Object> dataset : ragflowDatasets) {
                try {
                    String datasetName = (String) dataset.get("name");
                    String datasetId = (String) dataset.get("id");
                    
                    if (datasetName == null || datasetId == null) {
                        continue;
                    }
                    
                    // 2. Match local knowledge bases by name (name is the only
                    //    correlation key — TODO confirm names are unique enough).
                    LambdaQueryWrapper<KnowledgeInfo> wrapper = Wrappers.lambdaQuery();
                    wrapper.eq(KnowledgeInfo::getKname, datasetName);
                    KnowledgeInfo existingKnowledge = baseMapper.selectOne(wrapper);
                    
                    if (existingKnowledge != null) {
                        // Back-fill the dataset id when the local record lacks it.
                        if (existingKnowledge.getRagflowDatasetId() == null || existingKnowledge.getRagflowDatasetId().isEmpty()) {
                            existingKnowledge.setRagflowDatasetId(datasetId);
                            baseMapper.updateById(existingKnowledge);
                            System.out.println("更新知识库的RAGFlow ID: " + datasetName);
                        }
                        continue;
                    }
                    
                    // 3. Create a new local record owned by the current login user.
                    KnowledgeInfo newKnowledge = new KnowledgeInfo();
                    String kid = RandomUtil.randomString(10);
                    newKnowledge.setKid(kid);
                    newKnowledge.setKname(datasetName);
                    newKnowledge.setUid(LoginHelper.getLoginUser().getUserId());
                    newKnowledge.setDescription("从RAGFlow同步的数据集");
                    newKnowledge.setRagflowDatasetId(datasetId);
                    newKnowledge.setCreateTime(new Date());
                    newKnowledge.setCreateBy(LoginHelper.getLoginUser().getUsername());
                    
                    // Optionally enrich the description with the document count;
                    // a failure here must not abort the sync.
                    try {
                        List<Map<String, Object>> documents = ragflowService.getDocuments(datasetId);
                        if (documents != null && !documents.isEmpty()) {
                            newKnowledge.setDescription("从RAGFlow同步，包含 " + documents.size() + " 个文档");
                        }
                    } catch (Exception e) {
                        System.out.println("无法获取数据集文档数量: " + datasetName + " - " + e.getMessage());
                        // continue — this is purely cosmetic
                    }
                    
                    baseMapper.insert(newKnowledge);
                    System.out.println("成功从RAGFlow同步知识库: " + datasetName + " (ID: " + datasetId + ")");
                    syncedCount++;
                    
                } catch (Exception e) {
                    // One broken dataset must not stop the whole sync.
                    System.err.println("同步数据集失败: " + dataset.get("name") + " - " + e.getMessage());
                }
            }
            
            System.out.println("同步完成，共同步 " + syncedCount + " 个知识库");
            return syncedCount;
            
        } catch (Exception e) {
            System.err.println("从RAGFlow同步数据集失败: " + e.getMessage());
            e.printStackTrace();
            return 0;
        }
    }

    /**
     * Force-delete every dataset in RAGFlow. Destructive — use with care.
     *
     * @return number of datasets actually deleted
     */
    public int clearAllRagflowDatasets() {
        int deletedCount = 0;
        try {
            List<Map<String, Object>> datasets = ragflowService.getAllDatasets();
            if (datasets == null) {
                return 0;
            }
            for (Map<String, Object> dataset : datasets) {
                String datasetId = (String) dataset.get("id");
                String datasetName = (String) dataset.get("name");
                if (datasetId == null) {
                    continue;
                }
                try {
                    if (ragflowService.deleteDataset(datasetId)) {
                        System.out.println("已删除RAGFlow数据集: " + datasetName);
                        deletedCount++;
                    }
                } catch (Exception e) {
                    // One failed deletion must not stop the sweep.
                    System.err.println("删除RAGFlow数据集失败: " + datasetName + " - " + e.getMessage());
                }
            }
            return deletedCount;
        } catch (Exception e) {
            System.err.println("清空RAGFlow数据集失败: " + e.getMessage());
            return 0;
        }
    }

    /**
     * Build a RAGFlow {@code parser_config} from front-end supplied fields only.
     * Anything the form did not provide is left out so RAGFlow server defaults
     * apply (html4excel, layout_recognize, raptor, ...).
     *
     * @param bo business object carrying the form values
     * @return parser configuration (possibly empty)
     */
    private Map<String, Object> buildParserConfig(KnowledgeInfoBo bo) {
        Map<String, Object> config = new HashMap<>();

        // "Text block size" form field -> chunk_token_num.
        if (bo.getTextBlockSize() != null && bo.getTextBlockSize() > 0) {
            config.put("chunk_token_num", bo.getTextBlockSize());
        }

        // "Separator" form field -> delimiter; the escaped "\n" becomes a real newline.
        if (StringUtils.isNotBlank(bo.getKnowledgeSeparator())) {
            config.put("delimiter", bo.getKnowledgeSeparator().replace("\\n", "\n"));
        }

        System.out.println("构建的RAGFlow配置（仅前端输入）: " + config);
        return config;
    }
    
    /**
     * Map the local share flag to a RAGFlow permission value.
     * RAGFlow only accepts "me" or "team"; "1" marks a public knowledge base.
     *
     * @param share share flag ("1" = public, anything else = private)
     * @return "team" for public, "me" for private
     */
    private String getPermission(String share) {
        if ("1".equals(share)) {
            return "team";
        }
        return "me";
    }
    
    /**
     * Map a UI-selected vector model to RAGFlow's embedding model identifier
     * (format: {@code <model_name>@<provider>}).
     *
     * @param vectorModel model name chosen on the front end (may be blank)
     * @return RAGFlow embedding model id, or {@code null} to let the RAGFlow
     *         server fall back to its default embedding model
     */
    private String getEmbeddingModel(String vectorModel) {
        if (StringUtils.isBlank(vectorModel)) {
            // null => use the RAGFlow server's default embedding model
            return null;
        }
        
        // Locale.ROOT keeps the lookup locale-independent: the default-locale
        // toLowerCase() breaks on e.g. the Turkish dotless-i, which would make
        // "TEXT-EMBEDDING-3-SMALL" fail to match on such locales.
        switch (vectorModel.toLowerCase(Locale.ROOT)) {
            case "text-embedding-3-small":
                return "text-embedding-3-small@OpenAI"; // OpenAI provider
            case "text-embedding-ada-002":
                return "text-embedding-ada-002@OpenAI"; // OpenAI provider
            case "bge-large-zh-v1.5":
                return "BAAI/bge-large-zh-v1.5@BAAI"; // BAAI provider
            default:
                // Unknown model: fall back to the RAGFlow server default.
                return null;
        }
    }

    /**
     * Delete a knowledge base attachment, removing the matching document from
     * RAGFlow first (matched by document name), then the local metadata row.
     *
     * @param kid   knowledge base id
     * @param docId local document id
     * @return {@code true} when the local attachment was removed
     */
    public boolean removeAttach(String kid, String docId) {
        try {
            // 1. Locate the local attachment row.
            LambdaQueryWrapper<KnowledgeAttach> wrapper = Wrappers.lambdaQuery();
            wrapper.eq(KnowledgeAttach::getKid, kid)
                   .eq(KnowledgeAttach::getDocId, docId);
            KnowledgeAttach attach = attachMapper.selectOne(wrapper);
            if (attach == null) {
                System.err.println("附件不存在: " + docId);
                return false;
            }

            // 2. Remove the same-named document from the RAGFlow dataset, if linked.
            List<KnowledgeInfoVo> knowledgeInfoList = baseMapper.selectVoByMap(Map.of("kid", kid));
            if (knowledgeInfoList != null && !knowledgeInfoList.isEmpty()) {
                String datasetId = knowledgeInfoList.get(0).getRagflowDatasetId();
                if (datasetId != null && !datasetId.isEmpty()) {
                    for (Map<String, Object> doc : ragflowService.getDocuments(datasetId)) {
                        if (attach.getDocName().equals(doc.get("name"))) {
                            String documentId = (String) doc.get("id");
                            if (documentId != null) {
                                ragflowService.deleteDocument(datasetId, documentId);
                                System.out.println("已从RAGFlow中删除文档: " + documentId);
                            }
                            break;
                        }
                    }
                }
            }

            // 3. Remove the local metadata row.
            attachMapper.delete(wrapper);
            System.out.println("已从本地删除附件: " + attach.getDocName());
            return true;
        } catch (Exception e) {
            System.err.println("删除附件失败: " + e.getMessage());
            e.printStackTrace();
            return false;
        }
    }
}
