package com.ruoyi.admin.service.impl;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.lang.tree.Tree;
import cn.hutool.core.lang.tree.TreeNode;
import cn.hutool.core.lang.tree.TreeUtil;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.ruoyi.admin.domain.LlmDatasets;
import com.ruoyi.admin.mapper.LlmDatasetsMapper;
import com.ruoyi.admin.service.ILlmDatasetsService;
import com.ruoyi.common.constant.MilvusConstants;
import com.ruoyi.common.exception.ValidException;
import com.ruoyi.common.utils.DateUtils;
import com.ruoyi.common.utils.MilvusUtils;
import com.ruoyi.common.utils.StpSystemUtil;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.common.utils.uuid.IdUtils;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Service
@RequiredArgsConstructor
public class LlmDatasetServiceImpl implements ILlmDatasetsService {

    private final LlmDatasetsMapper llmDatasetsMapper;

    /**
     * Creates a dataset node. (Method name "creatDataset" is a typo inherited
     * from the interface and kept for compatibility.)
     * <p>
     * Root nodes (parentId == "0") must have a unique datasetCode, which is also
     * used as the Milvus partition name; child nodes are inserted directly.
     *
     * @param datasets dataset to create; id/createTime/createBy are filled here
     * @return {@code true} on success
     * @throws ValidException if a root node with the same datasetCode already exists
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public boolean creatDataset(LlmDatasets datasets) {
        datasets.setId(IdUtils.fastSimpleUUID());
        datasets.setCreateTime(DateUtils.getTime());
        datasets.setCreateBy(StpSystemUtil.getSysUser().getUsername());
        // Root node: enforce unique dataset code and create the Milvus partition.
        if ("0".equals(datasets.getParentId())) {
            QueryWrapper<LlmDatasets> queryWrapper = new QueryWrapper<>();
            queryWrapper.eq("parent_id", "0");
            queryWrapper.eq("dataset_code", datasets.getDatasetCode());
            Long count = llmDatasetsMapper.selectCount(queryWrapper);
            if (count > 0) {
                throw new ValidException("知识库编码已存在");
            }
            // The datasetCode doubles as the Milvus partition name.
            MilvusUtils.createPartition(MilvusConstants.VECTOR_DOCUMENT_SEGMENT, datasets.getDatasetCode());
        }
        // Child nodes need no partition; both branches end with a plain insert.
        llmDatasetsMapper.insert(datasets);
        return true;
    }

    /**
     * Lists first-level (root) datasets, optionally filtered.
     *
     * @param datasetName  exact match against dataset_code (NOTE(review): the
     *                     parameter name suggests a name filter but the original
     *                     query matched dataset_code — kept as-is, confirm intent)
     * @param categoryCode exact match against category_code
     * @param keywords     fuzzy (LIKE) match against the name column
     * @return matching root datasets ordered by sort, then create_time
     */
    @Override
    public List<LlmDatasets> getDatasets(String datasetName, String categoryCode, String keywords) {
        QueryWrapper<LlmDatasets> wrapper = new QueryWrapper<>();
        wrapper.eq(StringUtils.isNotBlank(datasetName), "dataset_code", datasetName);
        // BUG FIX: the category filter previously compared category_code against
        // datasetName (copy-paste error), so it never filtered correctly.
        wrapper.eq(StringUtils.isNotBlank(categoryCode), "category_code", categoryCode);

        // Keyword fuzzy search on the name column.
        if (StringUtils.isNotBlank(keywords)) {
            wrapper.like("name", keywords);
        }

        // Only first-level (root) datasets.
        wrapper.eq("parent_id", "0");
        wrapper.orderByAsc("sort", "create_time");

        return llmDatasetsMapper.selectList(wrapper);
    }

    /**
     * Partially updates a dataset: only non-blank / positive fields are written,
     * so unset fields in the input do not clobber existing column values.
     *
     * @param datasets carries the id plus the fields to update
     * @return {@code true} if a row was updated
     * @throws ValidException if the id is blank
     */
    @Override
    public boolean updateDataset(LlmDatasets datasets) {
        if (StringUtils.isBlank(datasets.getId())) {
            throw new ValidException("修改失败");
        }
        UpdateWrapper<LlmDatasets> updateWrapper = new UpdateWrapper<>();
        updateWrapper.eq("id", datasets.getId());
        updateWrapper.set("update_time", DateUtils.getTime());
        updateWrapper.set("label_data", datasets.getLabelData());
        updateWrapper.set(StringUtils.isNotBlank(datasets.getFolderCode()), "folder_code", datasets.getFolderCode());
        updateWrapper.set(StringUtils.isNotBlank(datasets.getName()), "name", datasets.getName());
        updateWrapper.set(StringUtils.isNotBlank(datasets.getCategoryCode()), "category_code", datasets.getCategoryCode());
        updateWrapper.set(StringUtils.isNotBlank(datasets.getSliceType()), "slice_type", datasets.getSliceType());
        updateWrapper.set(StringUtils.isNotBlank(datasets.getSliceTypeSymbol()), "slice_type_symbol", datasets.getSliceTypeSymbol());
        updateWrapper.set(datasets.getSliceTypeMaxLength() > 0, "slice_type_max_length", datasets.getSliceTypeMaxLength());
        updateWrapper.set(datasets.getSliceTypeOverlapLength() > 0, "slice_type_overlap_length", datasets.getSliceTypeOverlapLength());
        // BUG FIX: the original called updateById(datasets), which ignored the
        // wrapper built above (all conditional set(...) logic was dead code) and
        // let entity-based updates bypass the blank-field guards. Execute the
        // wrapper instead; the entity argument is null because every column is
        // already carried by the wrapper's set(...) calls.
        return llmDatasetsMapper.update(null, updateWrapper) > 0;
    }

    /**
     * Deletes a dataset row; for root nodes the corresponding Milvus partition
     * (named after datasetCode) is released and dropped first.
     *
     * @param id dataset primary key
     * @return {@code true} if a database row was deleted
     * @throws InterruptedException if the release-wait sleep is interrupted
     * @throws ValidException       if no dataset with the given id exists
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public boolean deleteDataset(String id) throws InterruptedException {
        QueryWrapper<LlmDatasets> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("id", id);
        LlmDatasets llmDatasets = llmDatasetsMapper.selectOne(queryWrapper, false);
        // BUG FIX: guard against a missing row — the original dereferenced the
        // result unconditionally and would NPE for an unknown id.
        if (llmDatasets == null) {
            throw new ValidException("删除失败");
        }
        // Only root nodes own a Milvus partition that must be removed.
        if ("0".equals(llmDatasets.getParentId())) {
            // A partition must be released before it can be dropped.
            MilvusUtils.releasePartition(MilvusConstants.VECTOR_DOCUMENT_SEGMENT, llmDatasets.getDatasetCode());
            // TODO: release is asynchronous; replace this fixed 1s sleep with a
            // poll of the release status once the Milvus util exposes one.
            Thread.sleep(1000);
            MilvusUtils.deletePartition(MilvusConstants.VECTOR_DOCUMENT_SEGMENT, llmDatasets.getDatasetCode());
        }
        // Remove the database row.
        int rows = llmDatasetsMapper.deleteById(id);
        return rows > 0;
    }

    /**
     * Builds the subtree rooted at the given dataset id and attaches the root
     * entity itself (the tree builder only fills in descendants).
     *
     * @param id root dataset id
     * @return tree whose extras carry each node's {@link LlmDatasets} under "datasets"
     * @throws ValidException if the id does not exist
     */
    @Override
    public Tree<String> getDatasetTreeById(String id) {
        // Load every dataset; the tree builder selects the relevant subtree.
        List<LlmDatasets> list = llmDatasetsMapper.selectList(new QueryWrapper<>());

        // Build the subtree rooted at this id.
        Tree<String> nodeTree = getNodeTree(list, id);

        // Attach the root node's own entity — buildSingle only populates children.
        QueryWrapper<LlmDatasets> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("id", id);
        LlmDatasets llmDatasets = llmDatasetsMapper.selectOne(queryWrapper);
        if (llmDatasets == null) {
            // BUG FIX: the original dereferenced a possibly-null selectOne result.
            throw new ValidException("知识库不存在");
        }
        nodeTree.put("datasets", llmDatasets);
        nodeTree.setName(llmDatasets.getName());
        return nodeTree;
    }

    /**
     * Fetches a dataset by id.
     *
     * @param id primary key; blank ids yield an empty entity instead of null
     * @return the dataset, or an empty {@link LlmDatasets} for a blank id
     */
    @Override
    public LlmDatasets getDatasetById(String id) {
        if (StringUtils.isBlank(id)) {
            return new LlmDatasets();
        }
        return llmDatasetsMapper.selectById(id);
    }

    /**
     * Converts a flat dataset list into a hutool tree rooted at {@code parentId}.
     * Each node's extra map carries the full entity under the key "datasets".
     *
     * @param datasets flat list of all datasets
     * @param parentId id of the subtree root
     * @return single tree rooted at {@code parentId}
     */
    public Tree<String> getNodeTree(List<LlmDatasets> datasets, String parentId) {
        List<TreeNode<String>> nodeList = CollUtil.newArrayList();
        datasets.forEach(dataset -> {
            Map<String, Object> extra = new HashMap<>();
            extra.put("datasets", dataset);
            nodeList.add(new TreeNode<>(dataset.getId(), dataset.getParentId(), dataset.getName(), dataset.getSort()).setExtra(extra));
        });
        return TreeUtil.buildSingle(nodeList, parentId);
    }

}
