package com.cl.ks.service;

import com.cl.ks.entity.SparkNode;
import com.cl.ks.entity.SparkNodeCategory;
import com.cl.ks.repository.SparkNodeCategoryRepository;
import com.cl.mvc.service.BaseService;
import com.cl.spark.enums.SparkNodeEnum;
import com.cl.spark.factory.SparkNodeFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

@Service
public class SparkNodeCategoryService extends BaseService<SparkNodeCategory> {

    // NOTE(review): injected but not referenced in this class's visible code —
    // presumably used by inherited/other members or kept for future use; confirm before removing.
    private final KsRuleService ksRuleService;
    private final SparkNodeFactory sparkNodeFactory;

    // Cache for SparkNodeEnum lookups keyed by node name, so repeated list() calls
    // do not re-scan the enum. ConcurrentHashMap: safe under concurrent requests.
    private static final Map<String, SparkNodeEnum> NODE_ENUM_CACHE = new ConcurrentHashMap<>();

    // Category display names (runtime values — must not be altered).
    private static final String BASE_CATEGORY_NAME = "基础操作";
    private static final String STATISTICS_CATEGORY_NAME = "统计操作";
    private static final String OUTPUT_CATEGORY_NAME = "输出操作";
    private static final String DATA_CATEGORY_NAME = "数据节点";
    private static final String OTHER_CATEGORY_NAME = "其他节点";

    protected SparkNodeCategoryService(SparkNodeCategoryRepository sparkNodeCategoryRepository, KsRuleService ksRuleService, SparkNodeFactory sparkNodeFactory) {
        super(sparkNodeCategoryRepository);
        this.ksRuleService = ksRuleService;
        this.sparkNodeFactory = sparkNodeFactory;
    }

    /**
     * Builds the full, hard-coded catalogue of node categories (base, statistics,
     * output, data, other) and assigns every node a sequential id starting at 1,
     * in category order.
     *
     * @return the ordered list of categories, each populated with its nodes
     */
    @Override
    public List<SparkNodeCategory> list() {
        List<SparkNodeCategory> categories = new ArrayList<>();
        categories.add(createCategory(BASE_CATEGORY_NAME, createBaseNodes()));
        categories.add(createCategory(STATISTICS_CATEGORY_NAME, createStatisticsNodes()));
        categories.add(createCategory(OUTPUT_CATEGORY_NAME, createOutputNodes()));
        categories.add(createCategory(DATA_CATEGORY_NAME, createDataNodes()));
        categories.add(createCategory(OTHER_CATEGORY_NAME, createOtherNodes()));

        // Assign globally unique, sequential node ids across all categories.
        int index = 1;
        for (SparkNodeCategory category : categories) {
            for (SparkNode node : category.getSparkNodeList()) {
                node.setId(index++);
            }
        }

        return categories;
    }

    /** Creates a category holding the given display name and node list. */
    private SparkNodeCategory createCategory(String categoryName, List<SparkNode> nodes) {
        SparkNodeCategory category = new SparkNodeCategory();
        category.setCategoryName(categoryName);
        category.setSparkNodeList(nodes);
        return category;
    }

    /** Creates the "basic operations" nodes (join, merge, sort, dedupe, etc.). */
    private List<SparkNode> createBaseNodes() {
        List<SparkNode> nodes = new ArrayList<>();
        nodes.add(createSparkNode("连接", 2, 2, "el-icon-share", "根据指定字段连接两个数据节点的数据行"));
        nodes.add(createSparkNode("合并", 2, 2, "el-icon-d-arrow-right", "以行累加的方式合并两个相同结构的数据节点的所有数据"));
        nodes.add(createSparkNode("多输入合并", 10, 2, "el-icon-d-arrow-right", "字段必须相同,可以传入10个输入"));
        nodes.add(createSparkNode("排序", 1, 1, "el-icon-arrow-down", "根据指定字段对数据节点排序。排序分为升序和降序"));
        nodes.add(createSparkNode("去重", 1, 1, "el-icon-remove", "根据所选字段去重"));
        nodes.add(createSparkNode("分组去重", 1, 1, "el-icon-remove", "根据所选字段去重"));
        nodes.add(createSparkNode("追加固定值", 1, 1, "el-icon-remove", "追加指定字段名和固定值"));
        nodes.add(createSparkNode("追加字段值", 1, 1, "el-icon-remove", "追加指定字段名和字段值"));
        nodes.add(createSparkNode("数值计算", 1, 1, "el-icon-document", "指定字段计算数值"));
        nodes.add(createSparkNode("规则引用", 1, 1, "", "规则批量执行"));
        nodes.add(createSparkNode("分组子流程", 1, 1, "", "分组子流程"));
        nodes.add(createSparkNode("克隆字段", 1, 1, "", "克隆字段"));
        nodes.add(createSparkNode("JSON处理", 1, 1, "", "JSON处理"));
        return nodes;
    }

    /** Creates the "statistics operations" nodes (aggregation, matching, filtering). */
    private List<SparkNode> createStatisticsNodes() {
        List<SparkNode> nodes = new ArrayList<>();
        nodes.add(createSparkNode("分类汇总", 1, 1, "el-icon-document", "按照某个或多个字段（如名称、日期等）中的共同属性（不同种类），对其他字段数据值进行分类汇总求和（平均值、计数、最大值、最小值等）。"));
        nodes.add(createSparkNode("匹配", 2, 2, "el-icon-circle-check", "判断第一个数据中的某个字段的值是否与第二个数据的所选字段中的某个值匹配（匹配指的是两个值完全相同）"));
        nodes.add(createSparkNode("过滤", 1, 1, "el-icon-arrow-down", "提供基础的过滤操作"));
        nodes.add(createSparkNode("函数", 1, 1, "el-icon-arrow-down", "计数等函数"));
        nodes.add(createSparkNode("日期过滤", 1, 1, "el-icon-arrow-down", "提供基础的日期过滤操作"));
        nodes.add(createSparkNode("字段值映射", 1, 1, "el-icon-arrow-down", "字段值映射"));
        return nodes;
    }

    /** Creates the "output operations" nodes (HTTP push, table output, persistence). */
    private List<SparkNode> createOutputNodes() {
        List<SparkNode> nodes = new ArrayList<>();
        nodes.add(createSparkNode("接口输出", 1, 1, "el-icon-arrow-right", "请求http接口,不接收返回值"));
        nodes.add(createSparkNode("输出表", 1, 1, "el-icon-data-line", "输出数据到指定库的表"));
        nodes.add(createSparkNode("持久化数据", 1, 1, "el-icon-arrow-down", "持久化数据到本地，流程图可以直接使用读取持久化数据节点读取"));
        nodes.add(createSparkNode("推送线索", 1, 1, "", ""));
        nodes.add(createSparkNode("业务线索推送", 1, 1, "", ""));
        return nodes;
    }

    /** Creates the "data source" nodes (data resources, SQL query, persisted data). */
    private List<SparkNode> createDataNodes() {
        List<SparkNode> nodes = new ArrayList<>();
        nodes.add(createSparkNode("数据资源", 1, 1, "el-icon-arrow-right", "手动填写dbCode和表名代替数据源节点"));
        nodes.add(createSparkNode("SQL查询", 1, 1, "el-icon-data-line", "直接执行SQL查询数据库"));
        nodes.add(createSparkNode("读取持久化数据", 1, 1, "el-icon-arrow-down", "持久化数据到本地，流程图可以直接使用读取持久化数据节点读取"));
        return nodes;
    }

    /** Creates the "other" nodes (HTTP request). */
    private List<SparkNode> createOtherNodes() {
        List<SparkNode> nodes = new ArrayList<>();
        nodes.add(createSparkNode("请求接口", 1, 1, "el-icon-link", "请求http接口"));
        return nodes;
    }

    /**
     * Builds a {@link SparkNode}, resolving its code and template from
     * {@link SparkNodeEnum} (via the shared name-to-enum cache).
     *
     * @param nodeName    display name; must have a matching {@link SparkNodeEnum} entry
     * @param inputMax    maximum number of input data sets
     * @param inputMin    minimum number of input data sets
     * @param icon        icon identifier (may be empty)
     * @param description human-readable description (may be empty)
     * @return the populated node
     * @throws IllegalStateException if no enum entry is registered for {@code nodeName}
     */
    private SparkNode createSparkNode(String nodeName, Integer inputMax, Integer inputMin, String icon, String description) {
        SparkNodeEnum sparkNodeEnum = NODE_ENUM_CACHE.computeIfAbsent(nodeName, SparkNodeEnum::getSparkNodeByNodeName);
        // ConcurrentHashMap.computeIfAbsent returns null (and caches nothing) when the
        // mapping function yields null; fail fast with context instead of a bare NPE below.
        if (sparkNodeEnum == null) {
            throw new IllegalStateException("No SparkNodeEnum found for node name: " + nodeName);
        }

        SparkNode sparkNode = new SparkNode();
        sparkNode.setNodeName(nodeName);
        sparkNode.setNodePath(sparkNodeEnum.getCode());
        sparkNode.setNodeTemplate(sparkNodeEnum.getNodeTemplate());
        sparkNode.setInputDataListSizeMax(inputMax);
        sparkNode.setInputDataListSizeMin(inputMin);
        sparkNode.setIcon(icon);
        sparkNode.setNodeDescription(description);

        return sparkNode;
    }
}
