package avicit.bdp.dds.server.worker.task.sparkflow.utils;

import avicit.bdp.common.datasource.BaseDataSource;
import avicit.bdp.common.datasource.BaseKerberosDataSource;
import avicit.bdp.common.datasource.HdfsDataSource;
import avicit.bdp.common.datasource.KafkaDataSource;
import avicit.bdp.common.datasource.MySQLDataSource;
import avicit.bdp.common.dto.CalculateEngineConf;
import avicit.bdp.common.dto.StorageResourceConf;
import avicit.bdp.common.utils.ConfigUtils;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.common.utils.placeholder.PlaceholderUtils;
import avicit.bdp.dds.dao.entity.AlgoNodes;
import avicit.bdp.dds.dao.entity.FlinkConfig;
import avicit.bdp.dds.dao.entity.ProcessInstance;
import avicit.bdp.dds.dao.entity.ProcessResourceSettings;
import avicit.bdp.dds.dao.entity.Resource;
import avicit.bdp.dds.dao.entity.SparkConfig;
import avicit.bdp.dds.dao.entity.TaskNodes;
import avicit.bdp.dds.dispatch.model.ProcessData;
import avicit.bdp.dds.dispatch.model.TaskNode;
import avicit.bdp.dds.dispatch.model.TaskNodeConnect;
import avicit.bdp.dds.dispatch.task.sparkflow.SparkFlowParameters;
import avicit.bdp.dds.service.process.ProcessService;
import com.alibaba.fastjson2.JSON;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * @author xugb
 */
/**
 * Converts a dds {@link ProcessInstance} (its node graph, connections and
 * resource settings) into the flow-JSON payload consumed by the
 * spark / flink flow execution engine.
 *
 * <p>All methods are static; the class is a pure utility and is not meant
 * to be instantiated.</p>
 *
 * @author xugb
 */
public class FlowConvertUtils {

    private static final Logger logger = LoggerFactory.getLogger(FlowConvertUtils.class);

    /** Utility class — prevent instantiation. */
    private FlowConvertUtils() {
    }

    /**
     * Builds the complete flow JSON ({@code {"flow": {...}}}) for one process instance.
     *
     * @param instance        the process instance being converted
     * @param nodesMap        per-node map of custom components that need parameter conversion
     *                        (outer key: node id; inner key: component name, value: component type)
     * @param checkpoint      checkpoint path; added to the flow only when non-blank
     * @param runModeType     run mode; {@code runMode} is emitted only for DEBUG
     * @param processService  service used to resolve resources, data sources and script nodes
     * @param postDataList    out-parameter collecting post-success actions (e.g. resource saves)
     * @param taskInstanceMap task name → task instance id, used as each stop's uuid
     * @param yarnConf        calculation engine configuration (yarn / standalone)
     * @param paramsMap       global parameters substituted into {@code ${...}} placeholders
     * @param definitionType  definition type string; decides BATCH/STREAMING and spark/flink
     * @param prList          per-process resource settings (spark and/or flink)
     * @param redisConfig     redis connection settings (redisHost / redisPort / redisPassword)
     * @return the flow definition serialized as JSON
     */
    public static String processToJson(ProcessInstance instance,
                                       Map<String, Map<String, String>> nodesMap,
                                       String checkpoint,
                                       RunModeType runModeType,
                                       ProcessService processService,
                                       List<String> postDataList,
                                       Map<String, String> taskInstanceMap,
                                       CalculateEngineConf yarnConf,
                                       Map<String, String> paramsMap,
                                       String definitionType,
                                       List<ProcessResourceSettings> prList,
                                       Map<String, String> redisConfig) {
        Map<String, Object> flowVoMap = new HashMap<>(16);
        flowVoMap.put("name", instance.getName());
        flowVoMap.put("uuid", instance.getId());
        flowVoMap.put("version", 1.1);
        // Any definition type containing "stream" runs as a streaming job.
        if (definitionType.toLowerCase().contains("stream")) {
            flowVoMap.put("jobMode", "STREAMING");
        } else {
            flowVoMap.put("jobMode", "BATCH");
        }
        Map<String, Object> runConfigMap = new HashMap<>();
        runConfigMap.put("configPath", yarnConf.getConfigPath());
        // Spark resource settings
        if (definitionType.startsWith("spark")) {
            ProcessResourceSettings processResourceSettings = getCurrentProcessResourceSettings(prList, ProcessResourceSettings.TYPE_SPARK);
            SparkConfig sparkConfig = null;
            if (processResourceSettings != null) {
                sparkConfig = processResourceSettings.getSparkConfig();
            }
            if (sparkConfig == null) {
                // No per-process settings: fall back to a minimal cluster-mode default.
                sparkConfig = new SparkConfig();
                sparkConfig.setDeployMode("cluster");
                sparkConfig.setDriverCores(1);
                sparkConfig.setDriverMemory("1g");
                sparkConfig.setExecutorCores(1);
                sparkConfig.setExecutorMemory("1g");
                sparkConfig.setNumExecutors(1);
            } else {
                // NOTE(review): stored memory values are bare numbers; the "g" unit is
                // appended here, mutating the SparkConfig entity in place — calling this
                // twice on the same entity would yield "1gg". Confirm entities are not reused.
                sparkConfig.setDriverMemory(sparkConfig.getDriverMemory() + "g");
                sparkConfig.setExecutorMemory(sparkConfig.getExecutorMemory() + "g");
            }
            flowVoMap.put("driverMemory", sparkConfig.getDriverMemory());
            flowVoMap.put("executorMemory", sparkConfig.getExecutorMemory());
            flowVoMap.put("executorCores", String.valueOf(sparkConfig.getExecutorCores()));
            flowVoMap.put("executorNumber", String.valueOf(sparkConfig.getNumExecutors()));
            flowVoMap.put("driverCores", String.valueOf(sparkConfig.getDriverCores()));
            flowVoMap.put("deployMode", sparkConfig.getDeployMode());
            flowVoMap.put("flowType", "SPARK3");

            String master = "yarn";
            // Deploy mode "1" = standalone: the master is the standalone service address.
            if (yarnConf.getDeployMode().equalsIgnoreCase("1")) {
                master = yarnConf.getYarnAddress();
            }

            runConfigMap.put("spark.master", master);
            runConfigMap.put("spark.deploy.mode", sparkConfig.getDeployMode());

        } else {
            flowVoMap.put("flowType", "FLINK_15");

            Map<String, Object> envConfigMap = new HashMap<>();
            // Flink resource settings
            ProcessResourceSettings processResourceSettings = getCurrentProcessResourceSettings(prList, ProcessResourceSettings.TYPE_FLINK);
            FlinkConfig flinkConfig = null;
            if (processResourceSettings != null) {
                flinkConfig = processResourceSettings.getFlinkConfig();
            }
            if (flinkConfig != null) {
                envConfigMap.put("jobmanager.memory.process.size", flinkConfig.getJobManagerMemory() + "g");
                envConfigMap.put("taskmanager.memory.process.size", flinkConfig.getTaskManagerMemory() + "g");
                envConfigMap.put("taskmanager.numberOfTaskSlots", flinkConfig.getSlot());
                envConfigMap.put("parallelism", flinkConfig.getTaskManager());
            }
            flowVoMap.put("env", envConfigMap);
            String master = "yarn-cluster";
            String flinkDeploy = "run-application";
            // Deploy mode "1" = standalone: master is the standalone address and the
            // plain "run" command is used instead of "run-application".
            if (yarnConf.getDeployMode().equalsIgnoreCase("1")) {
                master = yarnConf.getYarnAddress();
                flinkDeploy = "run";
            }
            runConfigMap.put("dataflow.master", master);
            runConfigMap.put("dataflow.deploy.mode", flinkDeploy);

        }

        runConfigMap.put("yarn.url", yarnConf.getYarnAddress());
        StorageResourceConf resourceConf = FileAdapterUtils.getStorageResourceByProjectId(instance.getProjectId());
        if (FileAdapterUtils.TYPE_MINIO.equals(resourceConf.getType())) {
            // MinIO object storage: configure s3a credentials and bucket.
            runConfigMap.put("fs.s3a.access.key", resourceConf.getAccessKey());
            runConfigMap.put("fs.s3a.secret.key", resourceConf.getSecretKey());
            runConfigMap.put("fs.s3a.endpoint", resourceConf.getEndPoint());
            runConfigMap.put("fs.s3a.bucketname", resourceConf.getStoragePath());
            runConfigMap.put("bdp.store.type", "s3a");
        } else {
            runConfigMap.put("bdp.store.type", "hdfs");
            runConfigMap.put("fs.defaultFS", resourceConf.getDeFaultFs());
        }

        // Engine-side runtime configuration sourced from the application config.
        // (Config keys below are reproduced verbatim, including "passward".)
        runConfigMap.put("minio.endpoint", ConfigUtils.getInstance().getString("minio.endPoint.url", "http://127.0.0.1:9000"));
        runConfigMap.put("minio.username", ConfigUtils.getInstance().getString("minio.username", "minioadmin"));
        runConfigMap.put("minio.passward", ConfigUtils.getInstance().getString("minio.passward", "minioadmin"));
        runConfigMap.put("data.show", ConfigUtils.getInstance().getString("data.show", "10"));
        runConfigMap.put("data.save", ConfigUtils.getInstance().getString("data.save", "50"));
        runConfigMap.put("bdp.dds.url", ConfigUtils.getInstance().getString("bdp.dds.url", "http://127.0.0.1:9090"));
        runConfigMap.put("piflow.special", ConfigUtils.getInstance().getString("piflow.special", "false"));
        runConfigMap.put("piflow.algorithm.path", ConfigUtils.getInstance().getString("piflow.algorithm.path", "/phm/plugins/algorithm"));
        runConfigMap.put("spark.yarn.jars", ConfigUtils.getInstance().getString("spark.yarn.jars", ""));
        runConfigMap.put("python.home", ConfigUtils.getInstance().getString("python.home", "python3"));
        runConfigMap.put("redis.ip", redisConfig.get("redisHost"));
        runConfigMap.put("redis.port", redisConfig.get("redisPort"));
        runConfigMap.put("redis.password", redisConfig.get("redisPassword"));
        flowVoMap.put("runConfig", runConfigMap);

        ProcessData processData = JSON.parseObject(instance.getProcessInstanceJson(), ProcessData.class);
        List<TaskNode> taskNodes = processData.getTasks();
        List<TaskNodeConnect> taskNodeConnects = JSONUtils.toList(instance.getConnects(), TaskNodeConnect.class);

        // All stops, indexed by node id; input nodes (nodeType == 1) collected separately.
        Map<String, TaskNode> stopsMap = new HashMap<>(8);
        List<Map<String, Object>> processStopMapList = new ArrayList<>();

        List<TaskNode> inputTaskNodeList = new ArrayList<>();
        for (TaskNode taskNode : taskNodes) {
            stopsMap.put(taskNode.getId(), taskNode);

            int nodeType = taskNode.getNodeType() == null ? 0 : taskNode.getNodeType();
            if (nodeType == 1) {
                inputTaskNodeList.add(taskNode);
            }
        }

        // Paths: one FlowPathVo per connection between stops.
        List<FlowPathVo> thirdPathVoMapList = new ArrayList<>();

        if (CollectionUtils.isNotEmpty(taskNodeConnects)) {
            for (TaskNodeConnect processPath : taskNodeConnects) {
                TaskNode fromTaskNode = stopsMap.get(processPath.getEndPointSourceId());
                TaskNode toTaskNode = stopsMap.get(processPath.getEndPointTargetId());
                // Dangling endpoints are tolerated: a missing node yields an empty name.
                if (null == fromTaskNode) {
                    fromTaskNode = new TaskNode();
                }
                if (null == toTaskNode) {
                    toTaskNode = new TaskNode();
                }
                String to = (null != toTaskNode.getName() ? toTaskNode.getName() : "");
                // "default"/"Any" outports and "default" inports are normalized to "".
                String outport = (null != processPath.getOutport() && !"default".equalsIgnoreCase(processPath.getOutport()) && !"Any".equalsIgnoreCase(processPath.getOutport()) ? processPath.getOutport() : "");
                String inport = (null != processPath.getInport() && !"default".equalsIgnoreCase(processPath.getInport()) ? processPath.getInport() : "");
                String from = (null != fromTaskNode.getName() ? fromTaskNode.getName() : "");
                FlowPathVo path = new FlowPathVo();
                path.setFrom(from);
                path.setInport(inport);
                path.setOutport(outport);
                path.setTo(to);
                thirdPathVoMapList.add(path);
            }
        }
        flowVoMap.put("paths", thirdPathVoMapList);

        for (Map.Entry<String, TaskNode> stopEntry : stopsMap.entrySet()) {

            TaskNode taskNode = stopEntry.getValue();
            SparkFlowParameters parameter = JSONUtils.parseObject(taskNode.getParams(), SparkFlowParameters.class);
            Map<String, Object> thirdStopVo = new HashMap<>(8);
            thirdStopVo.put("uuid", taskInstanceMap.get(taskNode.getName()));
            thirdStopVo.put("name", taskNode.getName());
            thirdStopVo.put("bundle", parameter.getBundle());
            thirdStopVo.put("nodeType", taskNode.getNodeType() == null ? 0 : taskNode.getNodeType());

            /*
             * Custom components requiring parameter conversion for this node.
             * KEY:   component name
             * VALUE: component type
             */
            Map<String, String> nodeComponentMap = nodesMap.get(taskNode.getNodeId());

            /*
             * Raw property values passed by the component.
             */
            Map<String, Object> properties = JSONUtils.toObjectMap(parameter.getSettingParams());

            // Reconcile node parameters between the front end and the back end.
            handleProperties(nodeComponentMap, properties, processService, postDataList, taskInstanceMap, inputTaskNodeList, instance);

            // Fill in per-bundle default parameters (fork/merge ports, scripts, ...).
            handleNodeProperties(parameter, taskNode.getName(), thirdPathVoMapList, properties,
                    taskNode.getNodeId(), processService, instance);

            int nodeType = taskNode.getNodeType() == null ? 0 : taskNode.getNodeType();
            // Input nodes automatically get a "_schema" parameter listing the column names.
            if (nodeType == 1) {
                List<FlowColumn> columnList = JSONUtils.toList(taskNode.getOutputData(), FlowColumn.class);
                List<String> list = new ArrayList<>();
                for (FlowColumn column : columnList) {
                    list.add(column.getName());
                }
                String schema = String.join(",", list);
                properties.put("_schema", schema);
            }

            // Substitute global ${...} parameters into string properties.
            handleGlobalParams(properties, paramsMap);

            thirdStopVo.put("properties", properties);

            // StopCustomizedProperty  TODO not handled yet
            Map<String, Object> customizedProperties = new HashMap<>(16);
            thirdStopVo.put("customizedProperties", customizedProperties);

            processStopMapList.add(thirdStopVo);
        }
        flowVoMap.put("stops", processStopMapList);

        // Checkpoint path is optional.
        if (StringUtils.isNotBlank(checkpoint)) {
            flowVoMap.put("checkpoint", checkpoint);
        }
        if (RunModeType.DEBUG == runModeType) {
            flowVoMap.put("runMode", runModeType.getValue());
        }

        Map<String, Object> rtnMap = new HashMap<>(16);
        rtnMap.put("flow", flowVoMap);
        return JSONUtils.toJson(rtnMap);
    }

    /**
     * Returns the resource settings entry of the given type, or {@code null}
     * when the list is empty or contains no entry of that type.
     *
     * @param prList resource settings list (may be {@code null})
     * @param type   settings type ({@code TYPE_SPARK} / {@code TYPE_FLINK})
     * @return the matching settings, or {@code null}
     */
    private static ProcessResourceSettings getCurrentProcessResourceSettings(List<ProcessResourceSettings> prList, int type) {
        if (prList == null || prList.isEmpty()) {
            return null;
        }
        for (ProcessResourceSettings settings : prList) {
            if (type == settings.getType()) {
                return settings;
            }
        }
        return null;
    }

    /**
     * Replaces {@code ${a}}-style global-parameter placeholders in every
     * String-valued property, in place. Non-string values are left untouched.
     *
     * @param properties property map to rewrite in place
     * @param paramsMap  global parameter name → value
     */
    private static void handleGlobalParams(Map<String, Object> properties, Map<String, String> paramsMap) {
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            Object obj = entry.getValue();
            if (obj instanceof String) {
                entry.setValue(PlaceholderUtils.replacePlaceholders(obj.toString(), paramsMap, true));
            }
        }
    }

    /**
     * Injects per-bundle default parameters:
     * fork/merge port lists derived from the flow paths, algorithm / python
     * script contents resolved through the process service, and PHM sink ids.
     *
     * @param parameter          parsed node parameters (bundle decides the handling)
     * @param taskName           name of the current stop
     * @param thirdPathVoMapList all flow paths (used to derive fork/merge ports)
     * @param properties         property map rewritten in place
     * @param id                 node id, used to look up script content
     * @param processService     lookup service for algo/script nodes
     * @param instance           the owning process instance (for PHM sink ids)
     */
    private static void handleNodeProperties(SparkFlowParameters parameter, String taskName,
                                             List<FlowPathVo> thirdPathVoMapList,
                                             Map<String, Object> properties, String id,
                                             ProcessService processService, ProcessInstance instance) {

        if ("avicit.bdp.bundle.common.Fork".equalsIgnoreCase(parameter.getBundle())) {
            // Fork: collect all outports leaving this stop.
            List<String> list = new ArrayList<>();
            for (FlowPathVo path : thirdPathVoMapList) {
                if (path.getFrom().equals(taskName)) {
                    list.add(path.getOutport());
                }
            }
            properties.put("outports", StringUtils.join(list, Constants.COMMA));
        } else if ("avicit.bdp.bundle.common.Merge".equalsIgnoreCase(parameter.getBundle())) {
            // Merge: collect all inports entering this stop.
            List<String> list = new ArrayList<>();
            for (FlowPathVo path : thirdPathVoMapList) {
                if (path.getTo().equals(taskName)) {
                    list.add(path.getInport());
                }
            }
            properties.put("inports", StringUtils.join(list, Constants.COMMA));
        } else if (StringUtils.equalsAnyIgnoreCase(parameter.getBundle(),
                "avicit.bdp.bundle.script.ExecutePythonAlgo",
                "avicit.bdp.bundle.script.ExecutePythonAlgo2Inport")) {

            // Python algorithm nodes: inline the algorithm script content.
            AlgoNodes algoNodes = processService.getTaskAlgoNodesById(id);
            if (algoNodes != null) {
                properties.put("script", algoNodes.getContent());
            } else {
                properties.put("script", "");
            }
        } else if (StringUtils.equalsAnyIgnoreCase(parameter.getBundle(),
                "avicit.bdp.bundle.script.python.CommonNode",
                "avicit.bdp.bundle.script.python.CommonTrainNode",
                "avicit.bdp.bundle.script.python.CommonRunnerNode",
                "avicit.bdp.phm.script.CommonPartitionNode",
                "avicit.bdp.phm.script.CommonTrainNode",
                "avicit.bdp.phm.script.CommonPredictNode",
                "avicit.bdp.phm.script.CommonNode",
                "avicit.bdp.phm.script.CommonProcessNode")) {

            TaskNodes node = processService.getTaskNodesById(id);
            if (node != null && StringUtils.isNotBlank(node.getContent())) {
                properties.put("_script_", node.getContent());
            }
            // Algorithm install path for the domestic special-purpose machine.
            boolean isSpecialUsed = ConfigUtils.getInstance().getBoolean("piflow.special", false);
            if (isSpecialUsed && node != null) {
                properties.put("_algorithmNodeFilePath_", node.getNodeFilePath());
            }

        } else if ("avicit.bdp.phm.common.PhmJdbcSink".equalsIgnoreCase(parameter.getBundle())) {
            // PHM database sink needs the run-task identifiers.
            properties.put("_RUN_TASK_ID", instance.getId());
            properties.put("_RUN_TASK_INSTANCE_ID", instance.getProcessDefinitionId());
        }
    }

    /**
     * Dispatches each custom component to its parameter-conversion handler.
     *
     * @param nodeComponentMap  custom components needing conversion
     *                          (KEY: component name, VALUE: component type)
     * @param properties        component property values, rewritten in place
     * @param processService    processService
     * @param postDataList      out-parameter collecting post-success actions
     * @param taskInstanceMap   task name → task instance id (currently unused here)
     * @param inputTaskNodeList input node list (currently unused here)
     * @param instance          the owning process instance
     */
    private static void handleProperties(Map<String, String> nodeComponentMap,
                                         Map<String, Object> properties,
                                         ProcessService processService,
                                         List<String> postDataList,
                                         Map<String, String> taskInstanceMap,
                                         List<TaskNode> inputTaskNodeList,
                                         ProcessInstance instance) {
        if (nodeComponentMap == null || nodeComponentMap.isEmpty()) {
            return;
        }

        for (Map.Entry<String, String> entry : nodeComponentMap.entrySet()) {
            String key = entry.getKey();
            String val = MapUtils.getString(properties, key, "");
            String component = entry.getValue();
            if ("UserTableId".equals(component)) {
                handleUserTableId(val, component, properties, processService);
            } else if ("UserTableName".equals(component)) {
                handleUserTableName(val, properties, processService);
            } else if ("UserDataSourceId".equals(component)) {
                handleUserDataSourceId(val, properties, processService);
            } else if ("UserResourceFileSelect".equals(component)) {
                handleUserResourceFileSelect(val, key, properties, processService);
            } else if ("UserResourceSave".equals(component)) {
                handleUserResourceSave(val, key, properties, processService, postDataList);
            } else if ("UserPhmTrainModelInput".equalsIgnoreCase(component)) {
                handleUserPhmTrainModelInput(val, key, properties, processService);
            } else if ("UserFileSelect".equalsIgnoreCase(component)) {
                handleUserFileSelect(val, key, properties, instance);
            }

        }
    }

    /**
     * Rewrites a selected file path into the s3a scheme when the project's
     * storage is MinIO; HDFS-backed projects keep the value unchanged.
     */
    private static void handleUserFileSelect(String val, String key, Map<String, Object> properties, ProcessInstance instance) {
        StorageResourceConf resourceConf = FileAdapterUtils.getStorageResourceByProjectId(instance.getProjectId());
        if (FileAdapterUtils.TYPE_MINIO.equals(resourceConf.getType())) {
            // Spark reads MinIO objects as s3a://BUCKET_NAME/FILE_NAME.
            val = "s3a://" + resourceConf.getStoragePath() + "/" + val;
            properties.put(key, val);
        }
    }

    /**
     * Stores the PHM training-result model name. The {@code key} and
     * {@code processService} parameters are kept for dispatch-signature
     * uniformity but are intentionally unused.
     */
    private static void handleUserPhmTrainModelInput(String val, String key, Map<String, Object> properties, ProcessService processService) {
        // Name under which the PHM training result model is saved.
        properties.put("_phm_file_name_", val);
    }

    /**
     * Resolves a selected resource id ({@code val}) to its full path.
     * No-op when the id is blank.
     */
    private static void handleUserResourceFileSelect(String val, String key, Map<String, Object> properties, ProcessService processService) {
        // val format: resource id
        if (StringUtils.isBlank(val)) {
            return;
        }
        Resource resource = processService.getResourceById(val);
        properties.put(key, resource.getFullName());
    }

    /**
     * Resolves a "resourceId,filename" pair to a full output path and records
     * the save action in {@code postDataList} for post-success processing.
     * No-op when {@code val} does not split into exactly two parts.
     */
    private static void handleUserResourceSave(String val, String key, Map<String, Object> properties, ProcessService processService, List<String> postDataList) {
        // val format: resourceId,filename
        String[] vals = val.split(Constants.COMMA);
        if (vals.length != 2) {
            return;
        }

        Resource resource = processService.getResourceById(vals[0]);
        String fullName = resource.getFullName();
        if (StringUtils.isBlank(fullName)) {
            fullName = processService.getBaseResourceFullName(resource.getProjectId());
        }
        // BUGFIX: previously resource.getFullName() was used here, discarding the
        // blank-name fallback computed above and producing a path with an empty prefix.
        properties.put(key, fullName + Constants.SINGLE_SLASH + vals[1]);

        postDataList.add("UserResourceSave-" + val);
    }

    /**
     * Expands a data source id into the connection properties matching its
     * type (kafka, hdfs, or a jdbc-style source), then applies kerberos
     * settings. Falls back to an empty MySQL source when the id is unknown.
     * No-op when the id is blank.
     */
    private static void handleUserDataSourceId(String val, Map<String, Object> properties, ProcessService processService) {
        // val format: data source id
        if (StringUtils.isBlank(val)) {
            return;
        }
        BaseDataSource dataSourceCfg = processService.getDataSource(val);
        if (dataSourceCfg == null) {
            dataSourceCfg = new MySQLDataSource();
        }
        if (Constants.DB_TYPE_KAFKA.equals(dataSourceCfg.type())) {
            KafkaDataSource kafka = (KafkaDataSource) dataSourceCfg;
            properties.put("kafka_host", kafka.getAddress());
            properties.put("protocol", kafka.getProtocol());
            properties.put("user", kafka.getUser());
            properties.put("password", kafka.getPassword());
        } else if (Constants.DB_TYPE_HDFS.equals(dataSourceCfg.type())) {
            HdfsDataSource hdfs = (HdfsDataSource) dataSourceCfg;
            properties.put("hdfsUrl", hdfs.getAddress());
            properties.put("hdfsUser", hdfs.getUser());
            properties.put("hdfsBasePath", hdfs.getFilePath());
        } else {
            handleConnectionInfo(properties, dataSourceCfg);
        }

        handleKerberos(dataSourceCfg, properties);
    }

    /**
     * Copies kerberos settings into {@code properties} when the data source
     * supports kerberos; otherwise marks kerberos as disabled.
     */
    private static void handleKerberos(BaseDataSource baseDataSource, Map<String, Object> properties) {
        if (baseDataSource instanceof BaseKerberosDataSource) {
            BaseKerberosDataSource ds = (BaseKerberosDataSource) baseDataSource;
            properties.put("haveKerberos", ds.getKerberos() != null && ds.getKerberos() == 1);
            properties.put("krb5Path", ds.getKrb5Path());
            properties.put("keytabPath", ds.getKeytabPath());
            properties.put("principal", ds.getPrincipal());
        } else {
            properties.put("haveKerberos", false);
        }
    }

    /**
     * Expands a "dataSourceId,tableName" pair into jdbc connection properties
     * plus the {@code dbtable} name and kerberos settings.
     * No-op when {@code val} does not split into exactly two parts.
     */
    private static void handleUserTableName(String val, Map<String, Object> properties, ProcessService processService) {
        // val format: dataSourceId,tableName
        String[] vals = val.split(Constants.COMMA);
        if (vals.length != 2) {
            return;
        }
        BaseDataSource dataSourceCfg = processService.getDataSource(vals[0]);
        if (dataSourceCfg == null) {
            dataSourceCfg = new MySQLDataSource();
        }
        handleConnectionInfo(properties, dataSourceCfg);
        properties.put("dbtable", vals[1]);
        handleKerberos(dataSourceCfg, properties);
    }

    /**
     * Resolves a model table id to its data source and table name, filling in
     * jdbc connection, kerberos, and {@code dbtable} properties.
     *
     * @throws RuntimeException when the table's data source no longer exists
     */
    private static void handleUserTableId(String val, String component, Map<String, Object> properties, ProcessService processService) {
        if (StringUtils.isNotBlank(val)) {
            String dataSourceId = processService.getDataSourceIdByTableId(val);
            if (StringUtils.isEmpty(dataSourceId)) {
                throw new RuntimeException("模型关联数据源不存在");
            }

            BaseDataSource dataSourceCfg = processService.getDataSource(dataSourceId);
            handleDatabaseInfo(properties, dataSourceCfg);
            String table = processService.getTableNameByTableId(val);
            properties.put("dbtable", table);
        }
    }

    /**
     * Fills connection and kerberos properties for a (possibly null) data
     * source, defaulting to an empty MySQL source.
     */
    private static void handleDatabaseInfo(Map<String, Object> properties, BaseDataSource dataSourceCfg) {
        if (dataSourceCfg == null) {
            dataSourceCfg = new MySQLDataSource();
        }
        handleConnectionInfo(properties, dataSourceCfg);
        handleKerberos(dataSourceCfg, properties);
    }

    /**
     * Copies jdbc connection info (user, password, url, driver, type,
     * database) from the data source into {@code properties}.
     *
     * @param properties properties
     * @param dataSource dataSource
     * @author xugb
     * @date 2021/1/28 15:41
     */
    private static void handleConnectionInfo(Map<String, Object> properties, BaseDataSource dataSource) {
        properties.put("user", dataSource.getUser());
        properties.put("password", dataSource.getPassword());
        properties.put("url", dataSource.getJdbcUrl());
        properties.put("driver", dataSource.driverClassSelector());
        properties.put("dbType", dataSource.type());
        properties.put("database", dataSource.getDatabase());
    }

}
