package avicit.bdp.dgs.qa.service.engine.spark;

import avicit.bdp.common.service.dto.DataSourceDTO;
import avicit.bdp.common.service.service.CalculateEngineConfigService;
import avicit.bdp.common.service.service.DataSourceService;
import avicit.bdp.common.utils.ConfigUtils;
import avicit.bdp.common.utils.DESUtils;
import avicit.bdp.common.utils.database.DBUtils;
import avicit.bdp.core.constant.ProcessConstants;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.dispatch.enums.DataType;
import avicit.bdp.dds.dispatch.enums.Direct;
import avicit.bdp.dds.dispatch.enums.ProgramType;
import avicit.bdp.dds.dispatch.enums.TaskType;
import avicit.bdp.dds.dispatch.model.TwoTuple;
import avicit.bdp.dds.dispatch.process.Property;
import avicit.bdp.dds.dispatch.process.ResourceInfo;
import avicit.bdp.dds.dispatch.task.spark.SparkParameters;
import avicit.bdp.dgs.qa.dto.JobDTO;
import avicit.bdp.dgs.qa.dto.QaDataSourceDTO;
import avicit.bdp.dgs.qa.dto.RuleDTO;
import avicit.bdp.dgs.qa.service.RuleService;
import avicit.bdp.dgs.qa.service.engine.IEngine;
import avicit.bdp.dgs.qa.utils.tools.Md5Utils;
import avicit.platform6.commons.utils.ComUtil;
import avicit.platform6.core.exception.BusinessException;
import com.alibaba.fastjson2.JSONObject;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.util.*;

/**
 * @金航数码科技有限责任公司
 * @作者：developer
 * @邮箱：developer@avic-digital.com
 * @创建时间： 2023-09-17
 * @类说明：SparkEngine
 * @修改记录：
 * @注意事项：
 * @主要功能：Spark计算引擎类
 */
@Component("spark")
public class SparkEngine implements IEngine {
    private static final Logger logger = LoggerFactory.getLogger(SparkEngine.class);

    public static final String DEFAULT_JOB_RESULT_TABLE = "bdp_dgs_qa_job_result";

    /**
     * Shared pretty-printing JSON writer. {@code ObjectMapper} construction is expensive
     * and {@code ObjectWriter} is immutable/thread-safe, so build it once instead of
     * per call in {@link #toJsonWithFormat(Object)}.
     */
    private static final ObjectWriter PRETTY_WRITER = new ObjectMapper().writer().withDefaultPrettyPrinter();

    /**
     * Connection settings of the quality-job result table
     * (this application's own Spring datasource).
     */
    @Value("${spring.datasource.dbType:mysql}")
    private String dbType;

    @Value("${spring.datasource.url}")
    private String jdbcUrl;

    @Value("${spring.datasource.driver-class-name}")
    private String driverClass;

    @Value("${spring.datasource.username}")
    private String username;

    @Value("${spring.datasource.password}")
    private String password;

    @Autowired
    private RuleService ruleService;

    @Autowired
    private CalculateEngineConfigService engineService;

    @Autowired
    private DataSourceService commDsService;

    /**
     * Builds the ProcessDefinition map for a quality job.
     *
     * @param job         the quality job to build a process definition for
     * @param userOptions user-supplied Spark resource overrides (driverMemory, executorMemory,
     *                    driverCore, executorCores, numExecutors); may be null
     * @return a map with keys {@code globalParams} (empty list) and {@code tasks}
     *         (a single-element list of task-node maps)
     */
    @Override
    public Map<String, Object> build(JobDTO job, Map<String, Object> userOptions) {
        Map<String, Object> defeMap = new HashMap<>();

        /*
         * Fill order:
         * 1. Spark quality-task JSON configuration (localParametersMap)
         * 2. SparkParameters configuration
         * 3. TaskNode configuration
         * 4. processDefinitionJson of the ProcessDefinition (format: List<TaskNode>)
         */
        // step1: local parameters carrying the serialized quality-task JSON
        List<Property> localParameters = fillLocalParameters(job, userOptions);

        // step2: SparkParameters (resources, main jar, deploy mode)
        SparkParameters sparkParameters = fillSparkParameters(job, userOptions, localParameters);

        // step3: single TaskNode wrapping the Spark parameters
        List<Map<String, Object>> taskNodes = new ArrayList<>();
        taskNodes.add(fillTaskNode(job, sparkParameters));

        // step4: assemble processDefinitionJson of the ProcessDefinition
        defeMap.put("globalParams", new ArrayList<>());
        defeMap.put("tasks", taskNodes);

        return defeMap;
    }

    /**
     * Fills the {@code localParameters} field of SparkParameters with a single
     * {@code CALCULATE_TASK_JSON} property holding the pretty-printed task JSON.
     *
     * @param job         the quality job
     * @param userOptions user-defined options (currently unused here, kept for interface symmetry)
     * @return the local-parameter list for SparkParameters
     * @throws BusinessException if the task JSON cannot be serialized
     */
    public List<Property> fillLocalParameters(JobDTO job, Map<String, Object> userOptions) {
        // step1: initialize containers
        Map<String, Object> localParamsMap = new HashMap<>();
        Map<String, Object> settingParamsMap = new HashMap<>();
        Map<String, Object> parameterMap = new HashMap<>();
        localParamsMap.put("settingParams", settingParamsMap);
        localParamsMap.put("parameter", parameterMap);

        // step2: quality task name
        fillSparkTaskName(localParamsMap, job);

        // step3: env
        fillSparkTaskEnv(parameterMap);

        // step4: task base info
        fillSparkTaskBaseInfo(parameterMap, job);

        // step5: outputConfig
        fillSparkTaskOutputConfig(parameterMap);

        // step6: reader/transformer/writer
        fillSparkTaskExecuteRule(parameterMap, job);

        // FIX: was System.out.println — route the debug dump through the logger,
        // guarded so the JSON is only serialized when debug is enabled
        if (logger.isDebugEnabled()) {
            logger.debug("quality task parameterMap={}", JSONObject.toJSONString(parameterMap));
        }

        // step7: fill the CalculateTaskJson property (used to locate the task configuration)
        List<Property> localParameters = new ArrayList<>();
        try {
            Property taskJsonProperty = new Property();
            taskJsonProperty.setProp(ProcessConstants.CALCULATE_TASK_JSON);
            taskJsonProperty.setDirect(Direct.IN);
            taskJsonProperty.setType(DataType.VARCHAR);
            taskJsonProperty.setValue(toJsonWithFormat(localParamsMap));
            localParameters.add(taskJsonProperty);
        } catch (JsonProcessingException e) {
            // FIX: the original swallowed the cause; log it before rethrowing
            // (BusinessException here offers no cause-taking constructor in use)
            logger.error("填充SparkParameters中localParameters字段失败,jobId={}", job.getId(), e);
            throw new BusinessException("填充SparkParameters中localParameters字段失败,jobId=" + job.getId());
        }

        return localParameters;
    }

    /**
     * Fills the SparkParameters configuration (main class/jar, deploy mode, cluster
     * and resource sizing), applying user overrides when present.
     *
     * @param job             the quality job (unused directly; kept for interface symmetry)
     * @param userOptions     optional overrides: driverMemory, executorMemory,
     *                        driverCore, executorCores, numExecutors
     * @param localParameters the local parameters built by {@link #fillLocalParameters}
     * @return the populated SparkParameters
     */
    private SparkParameters fillSparkParameters(JobDTO job, Map<String, Object> userOptions, List<Property> localParameters) {
        SparkParameters sparkParameters = new SparkParameters();
        sparkParameters.setLocalParams(localParameters);
        sparkParameters.setMainClass("avicit.bdp.quality.QualityApplication");
        sparkParameters.setMainArgs("0");
        sparkParameters.setDeployMode("cluster");
        sparkParameters.setProgramType(ProgramType.JAVA);
        sparkParameters.setSparkVersion("SPARK3");
        // use the default calculation engine: set clusterId
        sparkParameters.setClusterId(engineService.getDefaultSparkProcessResource());

        // resource defaults, overridable via userOptions
        String driverMemory = "1G";
        String executorMemory = "2G";
        int driverCore = 1;
        int executorCores = 2;
        int numExecutors = 3;
        if (userOptions != null) {
            if (userOptions.get("driverMemory") != null) {
                driverMemory = userOptions.get("driverMemory").toString();
            }
            if (userOptions.get("executorMemory") != null) {
                executorMemory = userOptions.get("executorMemory").toString();
            }
            if (userOptions.get("driverCore") != null) {
                driverCore = Integer.parseInt(userOptions.get("driverCore").toString());
            }
            if (userOptions.get("executorCores") != null) {
                executorCores = Integer.parseInt(userOptions.get("executorCores").toString());
            }
            if (userOptions.get("numExecutors") != null) {
                numExecutors = Integer.parseInt(userOptions.get("numExecutors").toString());
            }
        }
        sparkParameters.setDriverMemory(driverMemory);
        sparkParameters.setExecutorMemory(executorMemory);
        sparkParameters.setDriverCores(driverCore);
        sparkParameters.setExecutorCores(executorCores);
        sparkParameters.setNumExecutors(numExecutors);

        ResourceInfo mainJarRes = new ResourceInfo();
        mainJarRes.setRes(ConfigUtils.getInstance().getString("spark.jar.path"));
        sparkParameters.setMainJar(mainJarRes);

        return sparkParameters;
    }

    /**
     * Fills the TaskNode map for the scheduler: identity, display attributes,
     * retry/priority defaults and the serialized SparkParameters.
     *
     * @param job             the quality job supplying the node name
     * @param sparkParameters the Spark parameters serialized into the node's params field
     * @return the task-node map
     */
    private Map<String, Object> fillTaskNode(JobDTO job, SparkParameters sparkParameters) {
        Map<String, Object> taskMap = new HashMap<>();

        taskMap.put("id", ComUtil.getId());
        taskMap.put("name", job.getName());
        taskMap.put("code", "ALGORITHM");
        taskMap.put("nodeId", ComUtil.getId());
        taskMap.put("color", "#248ad8");
        taskMap.put("iconName", "algorithm");
        taskMap.put("type", TaskType.SPARK.name());
        taskMap.put("preTasks", new ArrayList<>());
        taskMap.put("preTasksObj", new HashMap<>());
        taskMap.put("runFlag", "NORMAL");
        taskMap.put("description", "质量检查任务...");
        taskMap.put("dependence", new HashMap<>());
        taskMap.put("maxRetryTimes", 0);
        taskMap.put("retryInterval", 1);
        taskMap.put("timeout", "");
        taskMap.put("taskInstancePriority", "MEDIUM");
        taskMap.put("workerGroup", "default");
        taskMap.put("params", JSONUtils.toJsonString(sparkParameters));

        return taskMap;
    }

    /**
     * Fills the Spark validation-task name from the job name.
     *
     * @param localParamsMap the top-level task JSON map to write into
     * @param job            the quality job
     */
    private void fillSparkTaskName(Map<String, Object> localParamsMap, JobDTO job) {
        localParamsMap.put("name", job.getName());
    }

    /**
     * Fills the Spark validation-task execution environment (batch mode, no extra config).
     *
     * @param parameterMap the parameter map to write the {@code env} entry into
     */
    private void fillSparkTaskEnv(Map<String, Object> parameterMap) {
        Map<String, Object> envMap = new HashMap<>();
        envMap.put("type", "batch");
        envMap.put("config", null);

        parameterMap.put("env", envMap);
    }

    /**
     * Fills the Spark validation-task base info: job id/name, scheduler type,
     * measure type, datasource/table names and remark.
     *
     * @param parameterMap the parameter map to write the {@code task} entry into
     * @param job          the quality job
     */
    private void fillSparkTaskBaseInfo(Map<String, Object> parameterMap, JobDTO job) {
        Map<String, Object> taskMap = new HashMap<>();

        taskMap.put("id", job.getId());
        taskMap.put("name", job.getName());
        taskMap.put("schedulerType", job.getSchedulerType());
        taskMap.put("measureType", job.getMeasureType());
        taskMap.put("datasourceName", job.getDatasourceName());
        taskMap.put("dbName", job.getDbName());
        taskMap.put("tableName", job.getTableName());
        taskMap.put("remark", job.getRemark());

        parameterMap.put("task", taskMap);
    }

    /**
     * Fills the Spark validation-task output configuration: the connection of this
     * application's own datasource, pointing results at {@link #DEFAULT_JOB_RESULT_TABLE}.
     *
     * @param parameterMap the parameter map to write the {@code outputConfig} entry into
     */
    private void fillSparkTaskOutputConfig(Map<String, Object> parameterMap) {
        Map<String, Object> outputConfigMap = new HashMap<>();

        // dbType has a "mysql" default via @Value; the null check is kept as a belt-and-braces guard
        outputConfigMap.put("dbType", this.dbType == null ? "mysql" : this.dbType);
        outputConfigMap.put("jdbcUrl", this.jdbcUrl);
        outputConfigMap.put("driverClass", this.driverClass);
        outputConfigMap.put("username", this.username);
        outputConfigMap.put("password", this.password);
        outputConfigMap.put("tableName", DEFAULT_JOB_RESULT_TABLE);

        parameterMap.put("readers".equals("") ? "" : "tableName", outputConfigMap.get("tableName")); // no-op guard removed below
        parameterMap.remove("tableName");
        parameterMap.put("outputConfig", outputConfigMap);
    }

    /**
     * Fills the Spark validation-task execution rules: reader, transformer and
     * writer sections, one set per rule configured on the job.
     *
     * @param parameterMap the parameter map to write readers/transformers/writers into
     * @param job          the quality job whose rules are expanded
     */
    private void fillSparkTaskExecuteRule(Map<String, Object> parameterMap, JobDTO job) {
        List<Map<String, Object>> readers = new ArrayList<>();
        List<Map<String, Object>> transformers = new ArrayList<>();
        List<Map<String, Object>> writers = new ArrayList<>();

        List<RuleDTO> ruleList = this.ruleService.getRuleListByJobId(job.getId());
        if (CollectionUtils.isNotEmpty(ruleList)) {
            // tracks reader output tables already emitted, across all rules
            Set<String> dsSet = new HashSet<>();
            for (RuleDTO rule : ruleList) {
                if (rule == null) {
                    continue;
                }

                // table alias lookup: md5(datasourceId + tableName) -> alias
                Map<String, String> aliasMap = getAliasName(rule);

                // readers
                List<Map<String, Object>> currReaderList = fillSparkTaskReader(rule, parameterMap, aliasMap, dsSet);
                if (CollectionUtils.isNotEmpty(currReaderList)) {
                    readers.addAll(currReaderList);
                }

                // transformer
                Map<String, Object> transformerMap = fillSparkTaskTransformer(rule, parameterMap, aliasMap);
                if (!transformerMap.isEmpty()) {
                    transformers.add(transformerMap);
                }

                // writer
                Map<String, Object> writerMap = fillSparkTaskWriter(rule, parameterMap, aliasMap);
                if (!writerMap.isEmpty()) {
                    writers.add(writerMap);
                }
            }
        }

        parameterMap.put("readers", readers);
        parameterMap.put("transformers", transformers);
        parameterMap.put("writers", writers);
    }

    /**
     * Fills the reader configurations for one rule: one JDBC reader per distinct
     * datasource/table pair, deduplicated across rules via {@code dsSet}.
     *
     * @param rule         the rule whose datasources are expanded
     * @param parameterMap the parameter map (unused here; kept for interface symmetry)
     * @param aliasMap     md5(datasourceId + tableName) -> alias ("table_&lt;md5&gt;")
     * @param dsSet        output tables already emitted; mutated as readers are added
     * @return the reader maps for this rule (possibly empty, never null)
     */
    private List<Map<String, Object>> fillSparkTaskReader(RuleDTO rule, Map<String, Object> parameterMap,
                                                          Map<String, String> aliasMap, Set<String> dsSet) {
        List<Map<String, Object>> readers = new ArrayList<>();
        List<QaDataSourceDTO> dataSourceList = rule.getDataSourceList();
        if (CollectionUtils.isNotEmpty(dataSourceList)) {
            for (QaDataSourceDTO ds : dataSourceList) {
                if (ds == null) {
                    continue;
                }

                // dedupe: multiple rules may reference the same datasource/table
                String md5Value = Md5Utils.encryptToMD5(ds.getDatasourceId() + ds.getTableName());
                String outTable = aliasMap.get(md5Value);
                if (dsSet.contains(outTable)) {
                    continue;
                }
                dsSet.add(outTable);

                // resolve the datasource connection info
                DataSourceDTO dataSource = this.commDsService.queryDataSourceById(ds.getDatasourceId());
                if (dataSource == null) {
                    logger.error("未找到指定数据源,创建或更新质量任务失败,ruleId={}", rule.getId());
                    continue;
                }
                if (StringUtils.isEmpty(dataSource.getConnectionParams())) {
                    logger.error("指定数据源未配置连接参数,创建或更新质量任务失败,ruleId={}", rule.getId());
                    continue;
                }
                JSONObject connObj = JSONObject.parseObject(dataSource.getConnectionParams());

                Map<String, Object> readerMap = new HashMap<>();
                readerMap.put("type", "JDBC");
                Map<String, Object> configMap = new HashMap<>();
                configMap.put("id", ds.getId());
                configMap.put("dbType", ds.getDatasourceType());
                configMap.put("driverClass", DBUtils.getBaseDataSource(dataSource).driverClassSelector());
                configMap.put("jdbcUrl", connObj.getString("jdbcUrl"));
                configMap.put("username", connObj.getString("user"));
                configMap.put("tableName", ds.getTableName());
                configMap.put("password", DESUtils.decrypt(connObj.getString("password")));
                configMap.put("outputTable", outTable);

                readerMap.put("config", configMap);
                readers.add(readerMap);
            }
        }

        return readers;
    }

    /**
     * Fills the SQL transformer for one rule, rewriting table names in the rule's
     * abnormal SQL to their Spark view aliases.
     *
     * @param rule         the rule whose abnormal SQL is rewritten (mutated in place)
     * @param parameterMap the parameter map (unused here; kept for interface symmetry)
     * @param aliasMap     md5(datasourceId + tableName) -> alias
     * @return the transformer map (type SQL, config with sql and outputTable)
     */
    private Map<String, Object> fillSparkTaskTransformer(RuleDTO rule, Map<String, Object> parameterMap, Map<String, String> aliasMap) {
        Map<String, Object> transformerMap = new HashMap<>();

        List<QaDataSourceDTO> dataSourceList = rule.getDataSourceList();
        if (CollectionUtils.isNotEmpty(dataSourceList) && StringUtils.isNotEmpty(rule.getAbnormalSql())) {
            // Sort by table name descending to avoid prefix clobbering: for
            // "select from ${t1},${t2}" with tables "table"/"table1", replacing
            // "table" first would corrupt "table1".
            List<TwoTuple<String, String>> tupleList = new ArrayList<>();
            for (QaDataSourceDTO ds : dataSourceList) {
                if (ds == null) {
                    continue;
                }

                tupleList.add(new TwoTuple<>(ds.getDatasourceId(), ds.getTableName()));
            }

            tupleList.sort((o1, o2) -> o2.getSecond().compareTo(o1.getSecond()));
            for (TwoTuple<String, String> tuple : tupleList) {
                String datasourceId = tuple.getFirst();
                String tableName = tuple.getSecond();
                String md5Value = Md5Utils.encryptToMD5(datasourceId + tableName);
                String aliasName = aliasMap.get(md5Value);
                if (StringUtils.isNotEmpty(aliasName)) {
                    // FIX: was replaceAll, which treats tableName as a regex and
                    // breaks on metacharacters ($, ., ...) — use literal replace
                    rule.setAbnormalSql(rule.getAbnormalSql().toLowerCase().replace(tableName.toLowerCase(), aliasName));
                }
            }
        }

        transformerMap.put("type", "SQL");
        Map<String, Object> configMap = new HashMap<>();
        configMap.put("sql", rule.getAbnormalSql());
        configMap.put("outputTable", "view_" + rule.getId());
        transformerMap.put("config", configMap);

        return transformerMap;
    }

    /**
     * Fills the JDBC writer for one rule: rule metadata plus the per-datasource
     * detail list used when persisting check results.
     *
     * @param rule         the rule to serialize
     * @param parameterMap the parameter map (unused here; kept for interface symmetry)
     * @param aliasMap     md5(datasourceId + tableName) -> alias
     * @return the writer map (type JDBC, config with rule info and dataSourceList)
     */
    private Map<String, Object> fillSparkTaskWriter(RuleDTO rule, Map<String, Object> parameterMap, Map<String, String> aliasMap) {
        Map<String, Object> writerMap = new HashMap<>();

        writerMap.put("type", "JDBC");
        Map<String, Object> configMap = new HashMap<>();
        writerMap.put("config", configMap);

        // rule info
        configMap.put("id", rule.getId());
        configMap.put("jobId", rule.getJobId());
        configMap.put("name", rule.getName());
        configMap.put("blockingType", rule.getBlockingType());
        configMap.put("blockingTypeName", rule.getBlockingTypeName());
        configMap.put("measureType", rule.getMeasureType());
        configMap.put("measureTypeName", rule.getMeasureTypeName());
        configMap.put("templateType", rule.getTemplateType());
        configMap.put("templateTypeName", rule.getTemplateTypeName());
        configMap.put("templateId", rule.getTemplateId());
        configMap.put("templateName", rule.getTemplateName());
        configMap.put("weight", rule.getWeight());
        configMap.put("weightRatio", rule.getWeightRatio());
        configMap.put("statDataType", rule.getStatDataType());
        configMap.put("alarmFormula", rule.getAlarmFormula());
        configMap.put("dirtyDataType", rule.getDirtyDataType());
        configMap.put("calculateTable", "view_" + rule.getId());

        // datasource info
        List<Map<String, Object>> dataSourceList = new ArrayList<>();
        if (CollectionUtils.isNotEmpty(rule.getDataSourceList())) {
            for (QaDataSourceDTO ds : rule.getDataSourceList()) {
                if (ds == null) {
                    continue;
                }

                String md5Value = Md5Utils.encryptToMD5(ds.getDatasourceId() + ds.getTableName());
                String aliasName = aliasMap.get(md5Value);
                DataSourceDTO dataSource = this.commDsService.queryDataSourceById(ds.getDatasourceId());
                if (dataSource == null) {
                    logger.error("未找到指定数据源,创建或更新质量任务失败,ruleId={}", rule.getId());
                    continue;
                }
                if (StringUtils.isEmpty(dataSource.getConnectionParams())) {
                    logger.error("指定数据源未配置连接参数,创建或更新质量任务失败,ruleId={}", rule.getId());
                    continue;
                }
                JSONObject connObj = JSONObject.parseObject(dataSource.getConnectionParams());

                Map<String, Object> dsMap = new HashMap<>();
                dsMap.put("id", ds.getId());
                dsMap.put("businessId", ds.getDatasourceType());
                dsMap.put("datasourceType", ds.getDatasourceType());
                dsMap.put("datasourceId", ds.getDatasourceId());
                dsMap.put("datasourceName", ds.getDatasourceName());
                dsMap.put("dbName", ds.getDbName());
                dsMap.put("tableName", ds.getTableName());
                dsMap.put("field", ds.getField());
                dsMap.put("filter", ds.getFilter());
                dsMap.put("countTable", aliasName);
                dsMap.put("ip", connObj.getString("ip"));
                dsMap.put("port", connObj.getString("port"));
                dataSourceList.add(dsMap);
            }
            configMap.put("dataSourceList", dataSourceList);
        }

        return writerMap;
    }

    /**
     * Serializes an object to a pretty-printed JSON string.
     *
     * @param obj the object to serialize; null yields null
     * @return the formatted JSON string, or null if {@code obj} is null
     * @throws JsonProcessingException if serialization fails
     */
    private String toJsonWithFormat(Object obj) throws JsonProcessingException {
        if (obj == null) {
            return null;
        }
        // reuse the shared writer instead of building an ObjectMapper per call
        return PRETTY_WRITER.writeValueAsString(obj);
    }

    /**
     * Gets table aliases for a rule's datasources.
     * Notes:
     * 1. Different rules may reference tables with the same name, so the SparkSQL
     *    view cannot be derived from tableName alone;
     * 2. The alias (SparkSQL view name) is "table_" + md5(datasourceId + tableName).
     *
     * @param rule the rule; null yields an empty map
     * @return Map of md5(datasourceId + tableName) -> alias name
     */
    private Map<String, String> getAliasName(RuleDTO rule) {
        Map<String, String> qaDsAliasMap = new HashMap<>();

        if (rule == null) {
            return qaDsAliasMap;
        }

        List<QaDataSourceDTO> dataSourceList = rule.getDataSourceList();
        if (CollectionUtils.isNotEmpty(dataSourceList)) {
            for (QaDataSourceDTO ds : dataSourceList) {
                if (ds == null) {
                    continue;
                }

                String md5Value = Md5Utils.encryptToMD5(ds.getDatasourceId() + ds.getTableName());
                // FIX: original checked values().contains(md5Value), which is always
                // false since values are "table_" + md5 — the key is the dedup handle
                if (!qaDsAliasMap.containsKey(md5Value)) {
                    qaDsAliasMap.put(md5Value, "table_" + md5Value);
                }
            }
        }

        return qaDsAliasMap;
    }
}
