package cn.getech.data.development.service.impl;

import cn.getech.data.development.config.properties.DataDevelopmentConfig;
import cn.getech.data.development.constant.*;
import cn.getech.data.development.entity.*;
import cn.getech.data.development.entity.flink.*;
import cn.getech.data.development.mapper.ConfConnectMapper;
import cn.getech.data.development.mapper.RealTimeTaskMapper;
import cn.getech.data.development.mapper.TableInfoMapper;
import cn.getech.data.development.mapper.flink.*;
import cn.getech.data.development.model.dto.RealTimeTaskSaveDto;
import cn.getech.data.development.model.vo.FlinkTaskNodeVO;
import cn.getech.data.development.model.vo.RealTimeTaskVO;
import cn.getech.data.development.service.*;
import cn.getech.data.development.utils.SqlParseUtil;
import cn.getech.data.development.utils.kafka.LocalKafkaUtil;
import cn.getech.data.intelligence.common.utils.R;
import cn.getech.system.center.model.dto.SysUserDto;
import cn.getech.system.center.service.SysUserService;
import cn.getech.system.center.utils.ShiroUtils;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.CollectionUtils;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

@Slf4j
@Service
public class RealTimeTaskServiceImpl extends ServiceImpl<RealTimeTaskMapper, RealTimeTask> implements RealTimeTaskService {

    // NOTE(review): both "mapper" (@Resource) and "realTimeTaskMapper" (@Autowired,
    // further down) inject the same RealTimeTaskMapper type — consider consolidating.
    @Resource
    private RealTimeTaskMapper mapper;
    @Autowired
    private RealTimeTaskLogService realTimeTaskLogService;
    @Autowired
    private FLinkService fLinkService;
    @Autowired
    private RealTimeTaskOpsService realTimeTaskOpsService;
    @Autowired
    private FlinkNodeUnionMapper flinkNodeUnionMapper;
    @Autowired
    private ConfConnectMapper confConnectMapper;
    @Autowired
    private TableFieldInfoService tableFieldInfoService;
    @Autowired
    private BdpRealTimeRunHistoryMapper bdpRealTimeRunHistoryMapper;
    @Autowired
    private FlinkNodeFieldMapper flinkNodeFieldMapper;
    @Autowired
    private FlinkTaskNodeService flinkTaskNodeService;
    @Autowired
    private DataDevelopmentConfig developmentConfig;
    @Autowired
    private SysUserService sysUserService;
    @Autowired
    private RealTimeTaskMapper realTimeTaskMapper;
    @Autowired
    private TableInfoMapper tableInfoMapper;
    @Autowired
    private BdpRealTableInfoMapper realTableInfoMapper;
    @Autowired
    private IBdpRealTaskAlarmService iBdpRealTaskAlarmService;
    @Autowired
    private BdpRealTableFieldMapper bdpRealTableFieldMapper;
    // Scheduled executors, presumably keyed by task/menu id — verify against usage
    // elsewhere in this class. NOTE(review): plain HashMap is not thread-safe; if
    // accessed from multiple threads, switch to ConcurrentHashMap.
    private static HashMap<Integer, ScheduledExecutorService> poolMap = new HashMap<>();

    /**
     * Saves or updates an existing real-time task and its node connections.
     * <p>
     * The task row must already exist for the given menu id; editing is refused
     * when another user holds the edit lock. On success the node-to-node lines
     * are replaced, an UPDATE entry is written to the operation log, and the
     * refreshed VO is pushed to the Flink config service and returned.
     *
     * @param dto incoming task definition (menu id, type, SQL, line list, config)
     * @return {@code R.okWithData(RealTimeTaskVO)} on success, {@code R.error(...)} otherwise
     */
    @Override
    public R saveOrUpdate(RealTimeTaskSaveDto dto) {
        try {
            RealTimeTaskVO vo = mapper.getByMenuId(dto.getMenuId());
            if (vo != null) {
                // Refuse edits when someone else holds the lock (lockState == 1).
                if (vo.getLockState() == 1 && vo.getUpdatePer() != ShiroUtils.getUserId().intValue()) {
                    return R.error("当前任务不可编辑，已被【" + vo.getUpdatePerName() + "】上锁");
                }
                RealTimeTask task = new RealTimeTask();
                BeanUtils.copyProperties(dto, task);
                // Default the task type to FLINK when the caller omitted it.
                if (task.getTaskType() == null) {
                    task.setTaskType(Constant.RealTimeTaskType.FLINK.getCode());
                }
                if (task.getMainArgs() == null) {
                    task.setMainArgs("");
                }
                // FLINK_SQL tasks without explicit SQL get it generated from the DAG
                // (op = 3 selects the dag-to-sql preview branch in wrapSql).
                if (Constant.RealTimeTaskType.FLINK_SQL.getCode().equals(dto.getTaskType())) {
                    if (StringUtils.isEmpty(dto.getFlinkSQL())) {
                        String sql = wrapFlinkSql(dto.getMenuId(), 3);
                        task.setFlinkSQL(sql);
                    }
                }
                if (task.getSavepointPath() == null) {
                    task.setSavepointPath("");
                }
                // Real-time collection tasks keep the resource id stored in the
                // previously saved config JSON.
                if (Constant.RealTimeTaskType.REAL_TIME_COLLECTION.getCode().equals(dto.getTaskType())) {
                    if (vo.getConfig() != null) {
                        Integer sourceId = vo.getConfig().getInteger("resourceId");
                        task.setResourceId(sourceId);
                    }
                }
                task.setUpdateTime(new Date());
                task.setUpdatePer(ShiroUtils.getUserId().intValue());
                if (mapper.saveOrUpdateTask(task) > 0) {
                    // Replace all node-to-node connections for this task.
                    if (CollectionUtils.isNotEmpty(dto.getLineList())) {
                        flinkNodeUnionMapper.delete(new QueryWrapper<FlinkNodeUnion>().eq("task_id", vo.getMenuId()));
                        for (FlinkNodeUnion flinkNodeUnion : dto.getLineList()) {
                            flinkNodeUnion.setTaskId(vo.getMenuId());
                        }
                        flinkNodeUnionMapper.batchInsert(dto.getLineList());
                    }
                }
                if (Constant.RealTimeTaskType.REAL_TIME_COLLECTION.getCode().equals(dto.getTaskType())) {
                    if (dto.getConfig() != null) {
                        String config = dto.getConfig().toJSONString();
                        mapper.updateConfig(config, task.getMenuId());
                    }
                }
                // Record an UPDATE entry in the operation log.
                // (The local "log" shadows the Lombok logger only inside this block.)
                RealTimeTaskLog log = new RealTimeTaskLog();
                log.setTaskId(task.getMenuId());
                log.setType(Constant.RealTimeTaskLogType.UPDATE.getCode());
                log.setOperationPer(ShiroUtils.getUserId().intValue());
                realTimeTaskLogService.save(log);
                // Reload the full VO and push the latest config to the Flink service.
                RealTimeTaskVO taskVO = getByMenuId(dto.getMenuId());
                fLinkService.saveConfig(taskVO);
                return R.okWithData(taskVO);
            } else {
                return R.error("保存失败,任务不存在");
            }
        } catch (Exception e) {
            log.error("保存实时任务出错：", e);
            return R.error("保存失败," + e.getMessage());
        }

    }

    /**
     * Locks or unlocks a real-time task for editing.
     * <p>
     * A running task cannot be locked; a task locked by someone else can neither
     * be re-locked nor unlocked by the current user.
     *
     * @param dto carries the menu id and the desired lock state (1 = lock, 0 = unlock)
     * @return {@code R.ok()} on success, {@code R.error(...)} with a reason otherwise
     */
    @Override
    public R lockOrUnlock(RealTimeTaskSaveDto dto) {
        try {
            RealTimeTaskVO vo = mapper.getByMenuId(dto.getMenuId());
            // Bug fix: the task must be null-checked BEFORE vo.getRunState() is
            // dereferenced; the original read runState first and only checked for
            // null afterwards, NPE-ing on an unknown menuId.
            if (vo == null) {
                return R.error("保存失败,任务不存在");
            }
            if (RunStatus.RUNNING.getStatus().equals(vo.getRunState())) {
                return R.error("当前任务正在运行，不可上锁");
            }
            int userId = ShiroUtils.getUserId().intValue();
            if (dto.getLockState() == 1) {// lock request
                if (vo.getLockState() == 1 && vo.getUpdatePer() != userId) {
                    return R.error("当前任务已被" + vo.getUpdatePerName() + "锁定，不可上锁");
                }
            }
            if (dto.getLockState() == 0) {// unlock request
                if (vo.getLockState() == 1 && vo.getUpdatePer() != userId) {
                    return R.error("必须是自己锁定的才可以解锁！");
                }
            }
            RealTimeTask task = new RealTimeTask();
            task.setMenuId(dto.getMenuId());
            task.setLockState(dto.getLockState());
            task.setUpdatePer(userId);
            mapper.updateByMenuId(task);
            return R.ok();
        } catch (Exception e) {
            // Log the full stack trace; the original logged only e.getMessage().
            log.error("保存实时任务出错：", e);
        }
        return R.error("保存出错！");
    }
    // Bootstrap servers of the platform's internal Kafka cluster; defaults to an
    // empty string when the property is not configured.
    @Value("${spring.kafka.bootstrap-servers:}")
    private String kafkaUrl;

    /**
     * Builds a Flink DDL {@code CREATE TABLE <name> (cols...) WITH (props...)}
     * statement for a source / mysql / kafka node; returns an empty builder for
     * any other node type.
     * <p>
     * Column types come from the node's field list; columns typed "row" are
     * resolved against the real table's field metadata. Connector properties come
     * either from legacy flat config keys or, for new data ("paramsList" present),
     * from the referenced real table plus user-supplied extra parameters.
     *
     * @param nodeVO DAG node carrying config JSON, table name and field list
     * @return DDL statement builder (empty for non source/mysql/kafka nodes)
     */
    private StringBuilder wrapCreateSql(FlinkTaskNodeVO nodeVO){
        JSONObject config = nodeVO.getConfig();
        StringBuilder sql = new StringBuilder();
        Integer nodeType=nodeVO.getNodeType();
        if (FlinkNodeType.source.getId().equals(nodeType) ||
                FlinkNodeType.mysql.getId().equals(nodeType) ||
                FlinkNodeType.kafka.getId().equals(nodeType)) {
            String tableName=nodeVO.getTableName();
            // Concrete field types keyed by field name, used to expand "row" columns.
            Map<String, String> fieldTypeMap = new HashMap<>();
            if (config != null) {
                Integer tableId = config.getInteger("tableId");
                if (tableId != null) {
                    List<BdpRealTableField> fields = bdpRealTableFieldMapper.selectList(
                            new QueryWrapper<BdpRealTableField>().eq("table_id", tableId));
                    for (BdpRealTableField field : fields) {
                        fieldTypeMap.put(field.getFieldName(),field.getFieldTypeValue());
                    }
                }
            }

            sql = new StringBuilder("CREATE TABLE " + tableName + "( \n");
            List<FlinkNodeField> fieldList = nodeVO.getFieldList();
            if (CollectionUtils.isNotEmpty(fieldList)) {
                for (FlinkNodeField field : fieldList) {
                    if (field.getFieldType().equalsIgnoreCase("row")) {
                        // "row" is a placeholder — substitute the real type from the
                        // table metadata. NOTE(review): when the type is unknown
                        // nothing is emitted but the ',' below still is, leaving a
                        // double comma — confirm the mapping always exists upstream.
                        if (fieldTypeMap.get(field.getFieldName()) != null) {
                            sql.append(field.getFieldName()).append(" ");
                            sql.append(fieldTypeMap.get(field.getFieldName()));
                        }
                    }else{
                        sql.append(field.getFieldName()).append(" ")
                                .append(field.getFieldType());

                    }
                    sql.append(",");
                }
                sql = new StringBuilder(sql.substring(0, sql.length() - 1));// drop the trailing comma
            }
            if (config != null) {// append computed-column / watermark clauses
                try {
                    // New format: "watermark" is a JSON object with named fields.
                    JSONObject waterMarkJson = config.getJSONObject("watermark");
                    String conversionFiledName = waterMarkJson.getString("conversionFiledName");
                    String conversionExpression = waterMarkJson.getString("conversionExpression");
                    String waterFiledName = waterMarkJson.getString("waterFiledName");
                    String waterExpression = waterMarkJson.getString("waterExpression");
                    if (StringUtils.isNotEmpty(conversionFiledName) &&  StringUtils.isNotEmpty(conversionExpression)) {
                        sql.append(" ,").append(conversionFiledName).append(" as ").append(conversionExpression);
                    }
                    if (StringUtils.isNotEmpty(waterFiledName) && StringUtils.isNotEmpty(waterExpression)) {
                        sql.append(" , watermark for ").append(waterFiledName).append(" as ").append(waterExpression);
                    }
                    sql.append("\n");
                } catch (Exception e) {// legacy data: "watermark" stored as a JSON array
                    // NOTE(review): control flow deliberately relies on the exception
                    // thrown above when "watermark" is absent or an array — fragile.
                    JSONArray watermark = config.getJSONArray("watermark");
                    if (config.getJSONArray("watermark") != null && watermark.size() >= 3) {
                        // e.g. element 1: aa AS TO_TIMESTAMP(FROM_UNIXTIME(dt, 'yyyy-MM-dd HH:mm:ss'))
                        //      element 2: watermark for ts as ts - interval '5' second
                        if (watermark.get(1) != null && watermark.get(2) != null) {
                            sql.append(" ,").append(watermark.get(1))
                                    .append(" ,").append(watermark.get(2)).append("\n");
                        }
                    }
                }
                sql.append(") WITH ( \n");
                // mysql nodes use the jdbc connector; everything else uses kafka.
                String type = FlinkNodeType.mysql.getId().equals(nodeType) ? "jdbc" : "kafka";
                sql.append("'connector.type' = '").append(type).append("',");
                sql.append("'connector.version' = 'universal',");
                if (!config.containsKey("paramsList")) {// legacy config: copy flat keys
                    for (String key : config.keySet()) {
                        // Skip UI/layout keys and keys handled elsewhere.
                        if ("watermark".equals(key) ||
                                "top".equals(key) || "left".equals(key) || "uuid".equals(key) ||
                                "temporarilyLineList".equals(key)||
                                "temporarilyNodeList".equals(key)||
                                "connectorFields".equals(key) ||
                                "sourceId".equals(key)) {
                            continue;
                        }
                        if (StringUtils.isEmpty(config.getString(key))) {
                            continue;
                        }
                        if ("group.id".equals(key)){
                            // Consumer group id needs the connector.properties prefix.
                            sql.append("'connector.properties.group.id' = ");
                            sql.append("'").append(config.getString(key)).append("',");
                        }else {
                            sql.append("'").append(key).append("' = ");
                            sql.append("'").append(config.getString(key)).append("',");
                        }

                    }
                }else {
                    // New config: derive connector properties from the real table row.
                    Integer tableId = config.getInteger("tableId");
                    if (tableId != null) {
                        BdpRealTableInfo realTable = realTableInfoMapper.getTableById(tableId);
                        if (realTable != null) {
                            // "text" format is emitted as csv with an explicit delimiter.
                            String format = DataFormat.text.getName().equals(realTable.getFormat())
                                    ? "csv" : realTable.getFormat();
                            if (DataFormat.text.getName().equals(realTable.getFormat())) {
                                sql.append("'format.field-delimiter' = '")
                                        .append(DelimitFormat.ObjOf(realTable.getDelimitId()).getExpress())
                                        .append("',");
                            }
                            sql.append("'format.type' = '").append(format).append("',");
                            sql.append("'connector.topic' = '").append(realTable.getTopicName()).append("',");
                            if (RealTableType.INNER.getCode().equals(realTable.getTableType())) {
                                // Internal tables use the platform's own kafka cluster.
                                sql.append("'connector.properties.bootstrap.servers' = '")
                                        .append(kafkaUrl).append("',");
                            }else{
                                // External tables use their configured connection,
                                // falling back to the platform cluster when missing.
                                ConfConnect kafka = confConnectMapper.selectById(realTable.getResourceId());
                                if (kafka != null) {
                                    sql.append("'connector.properties.bootstrap.servers' = '")
                                            .append(kafka.getKafkaUrl()).append("',");
                                }else{
                                    sql.append("'connector.properties.bootstrap.servers' = '")
                                            .append(kafkaUrl).append("',");
                                }
                            }
                        }
                    }
                    // User-supplied extra connector properties.
                    if (config.getJSONArray("paramsList") != null) {
                        for (Object param : config.getJSONArray("paramsList")) {
                            JSONObject j= (JSONObject) param;
                            if (j.getString("paramsName") != null && j.getString("paramsValue") != null) {
                                sql.append("'").append(j.getString("paramsName")).append("' = ");
                                sql.append("'").append(j.getString("paramsValue")).append("',");
                            }
                        }
                    }
                }
                sql = new StringBuilder(sql.substring(0, sql.length() - 1));// drop the trailing comma
            }
            // NOTE(review): if config is null the WITH clause is never opened but this
            // ")" is still appended — confirm config is always present for these nodes.
            sql.append(")");
        }
        return sql;
    }

    /**
     * Assembles the SQL / job-config for a task from its DAG nodes.
     *
     * @param taskVO  task with node list and line list loaded
     * @param sqlJson output parameter: for non-FLINK task types it receives the
     *                flinkSources / flinkSinks / flinkSqls / resource /
     *                flinkJobName entries
     * @param op      3 = "dag converted to sql" preview (node SQL only); any other
     *                value yields the full preview (source + node + sink SQL)
     * @return preview SQL text (empty when the task has no nodes)
     */
    private StringBuilder wrapSql(RealTimeTaskVO taskVO, JSONObject sqlJson, Integer op) {
        StringBuilder previewSql = new StringBuilder();
        if (CollectionUtils.isNotEmpty(taskVO.getNodeList())) {
            // Edge map: from-node id -> to-node id.
            Map<Integer, Integer> unionMap = Maps.newHashMap();
            for (FlinkNodeUnion flinkNodeUnion : taskVO.getLineList()) {
                unionMap.put(flinkNodeUnion.getNodeFrom(), flinkNodeUnion.getNodeTo());
            }
            Map<Integer, FlinkTaskNodeVO> nodeMap = Maps.newHashMap();
            for (FlinkTaskNodeVO nodeVO : taskVO.getNodeList()) {
                nodeMap.put(nodeVO.getId(), nodeVO);
            }
            List<JSONObject> list = Lists.newArrayList();
            String sinkTableName = "";
            StringBuilder sourceSql = new StringBuilder();
            StringBuilder sinkSql = new StringBuilder();
            StringBuilder nodeSql = new StringBuilder();
            if (!Constant.RealTimeTaskType.FLINK.getCode().equals(taskVO.getTaskType())) {
                StringBuilder sources = new StringBuilder();
                StringBuilder sinks = new StringBuilder();
                List<JSONObject> hiveSourceList = Lists.newArrayList();
                for (FlinkTaskNodeVO nodeVO : taskVO.getNodeList()) {
                    if (FlinkNodeType.source.getId().equals(nodeVO.getNodeType())) {
                        // Fix: build the DDL once per node. The original called
                        // wrapCreateSql twice here (and for sinks below), doubling
                        // the DB lookups that method performs.
                        StringBuilder createSql = wrapCreateSql(nodeVO);
                        sources.append(createSql).append(";");
                        sourceSql.append(createSql).append(" \n ");
                        nextNode(nodeMap, unionMap, nodeVO.getId(), list, 0);
                    } else if (FlinkNodeType.kafka.getId().equals(nodeVO.getNodeType()) ||
                            FlinkNodeType.hive.getId().equals(nodeVO.getNodeType()) ||
                            FlinkNodeType.kudu.getId().equals(nodeVO.getNodeType())
                    ) {
                        StringBuilder createSql = wrapCreateSql(nodeVO);
                        sinks.append(createSql).append(";");
                        sinkTableName = nodeVO.getTableName();
                        sinkSql.append(createSql).append(" \n ");
                    } else {
                        nodeSql.append(nodeVO.wrapSQL()).append(" \n ");
                    }
                    // hive/kudu sinks additionally contribute a resource descriptor.
                    if (FlinkNodeType.hive.getId().equals(nodeVO.getNodeType()) ||
                            FlinkNodeType.kudu.getId().equals(nodeVO.getNodeType())
                    ) {
                        if (nodeVO.getConfig() != null) {
                            Integer targetTableId = nodeVO.getConfig().getInteger("tableId");
                            if (targetTableId != null) {
                                hiveSourceList.add(hiveSource(targetTableId, nodeVO.getConfig()));
                            } else {
                                log.warn("任务sink配置缺少表tableId;{}", nodeVO.getConfig().toJSONString());
                            }
                        } else {
                            log.warn("任务sink节点缺少配置信息;nodeId:{}", nodeVO.getId());
                        }
                    }
                }
                if (Constant.RealTimeTaskType.FLINK_SQL.getCode().equals(taskVO.getTaskType())) {
                    if (StringUtils.isNotEmpty(taskVO.getFlinkSQL())) {
                        // Strip comments from the user-authored SQL before submission.
                        String runSQL = SqlParseUtil.wrapSql(taskVO.getFlinkSQL());
                        sqlJson.put("flinkSqls", runSQL);
                    }
                    sqlJson.put("resource", hiveSourceList);
                }
                if (Constant.RealTimeTaskType.FLINK_SQL_DAG.getCode().equals(taskVO.getTaskType())) {
                    // The last generated operator writes into the sink table.
                    if (CollectionUtils.isNotEmpty(list)) {
                        list.get(list.size() - 1).put("sinkTableName", sinkTableName);
                        sqlJson.put("flinkSqls", list);
                    }
                    sqlJson.put("resource", hiveSourceList);
                }
                if (sources.length() > 0 && sinks.length() > 0) {
                    // Trim the trailing ';' separators.
                    sources.setLength(sources.length() - 1);
                    sinks.setLength(sinks.length() - 1);
                    sqlJson.put("flinkSources", sources.toString());
                    sqlJson.put("flinkSinks", sinks.toString());
                }
                sqlJson.put("flinkJobName", taskVO.getId() + "_menuId_" + taskVO.getMenuId());
            }
            if (op == 3) {// dag-converted-to-sql preview: node SQL only
                previewSql.append(nodeSql);
            } else {
                if (Constant.RealTimeTaskType.FLINK_SQL.getCode().equals(taskVO.getTaskType())) {
                    previewSql.append(taskVO.getFlinkSQL()).append("\n\n");
                    previewSql.append(sourceSql).append("\n\n");
                    previewSql.append(sinkSql);
                } else {
                    previewSql.append(sourceSql).append("\n\n");
                    previewSql.append(nodeSql).append("\n\n");
                    previewSql.append(sinkSql);
                }
            }
        }
        return previewSql;
    }

    /**
     * Produces the SQL / job-config text for the task behind {@code menuId}.
     * When {@code op} equals {@code YesOrNoEnum.YES}, the assembled job-config
     * JSON is returned; otherwise the plain preview SQL built by
     * {@link #wrapSql} is returned.
     *
     * @param menuId menu id of the task
     * @param op     output selector (YES -> JSON config, otherwise preview SQL;
     *               3 additionally selects the dag-to-sql branch in wrapSql)
     * @return serialized JSON config or preview SQL text
     */
    @Override
    public String wrapFlinkSql(Integer menuId, Integer op) {
        JSONObject sqlJson = new JSONObject();
        RealTimeTaskVO taskVO = getByMenuId(menuId);
        StringBuilder previewSql = wrapSql(taskVO, sqlJson, op);
        boolean wantJson = YesOrNoEnum.YES.getValue().equals(op);
        return wantJson ? sqlJson.toJSONString() : previewSql.toString();
    }

    /**
     * Builds the sink resource descriptor (JSON) for a hive or kudu target table:
     * connection url, table naming, output format and the field list with
     * partition flags.
     *
     * @param targetTableId id of the target table in table metadata
     * @param config        node/task config JSON; may carry collection-time and
     *                      dynamic-partition settings plus per-field overrides
     * @return resource JSON consumed by the Flink job config
     */
    private JSONObject hiveSource(Integer targetTableId, JSONObject config){
        JSONObject hiveResource = new JSONObject();
        // NOTE(review): hard-coded hdfs/hdfs credentials — confirm this is intended.
        hiveResource.put("username", "hdfs");
        hiveResource.put("password", "hdfs");
        TableInfo hiveTable = tableInfoMapper.getTableById(targetTableId);
        if (hiveTable != null) {
            hiveResource.put("db", hiveTable.getDbName());
            // Kudu tables are addressed through impala with the "impala::" prefix.
            if (StorageEngineEnum.KUDU.getCode().equals(hiveTable.getStoreEngine())) {
                hiveResource.put("tableName", "impala::" + hiveTable.getDbName() + "." + hiveTable.getTableName());
            } else {
                hiveResource.put("tableName", hiveTable.getTableName());
            }
            hiveResource.put("sinkTableName", hiveTable.getTableName());
            if (StorageEngineEnum.KUDU.getCode().equals(hiveTable.getStoreEngine())) {
                hiveResource.put("url", developmentConfig.getKuduAddresses());
                hiveResource.put("typename", "kudu");
            }else{
                hiveResource.put("url", developmentConfig.getHiveserverurl() + hiveTable.getDbName());
                hiveResource.put("typename", "hive");
            }
            hiveResource.put("outputFormat",hiveTable.getFormat());
            hiveResource.put("delimitFormat",hiveTable.getDelimitId());
            // Optional collection-time column configuration.
            if (config.getString("collectionTimeName") != null
                    && config.getString("collectionTimeFormat") != null) {
                JSONObject collectionConfig = new JSONObject();
                collectionConfig.put("collectionTimeName", config.getString("collectionTimeName"));
                collectionConfig.put("collectionTimeFormat", config.getString("collectionTimeFormat"));
                hiveResource.put("collectionConfig", collectionConfig);
            }
        }
        List<TableFieldInfo> fieldList = tableFieldInfoService.
                list(new QueryWrapper<TableFieldInfo>().eq("table_id", targetTableId));
        hiveResource.put("isPartitionTable", false);// default: not partitioned
        JSONArray hiveFields = new JSONArray();
        for (TableFieldInfo tableFieldInfo : fieldList) {
            JSONObject field = new JSONObject();
            field.put("field_name", tableFieldInfo.getFieldName());
            field.put("class_type", tableFieldInfo.getFieldType());
            field.put("isPartition", false);
            // Mark partition columns and flag the table as partitioned.
            if (YesOrNoEnum.YES.getValue().equals(tableFieldInfo.getIsPartition())) {
                field.put("isPartition", true);
                hiveResource.put("isPartitionTable", true);
            }
            hiveFields.add(field);
        }
        JSONArray hiveFieldsTemp = new JSONArray();
        // Merge in front-end supplied field overrides (dynamic partition config):
        // a field present in config."fields" replaces the metadata version while
        // keeping the metadata's class_type.
        if (config.getJSONArray("fields") != null) {
            JSONArray paramFields = config.getJSONArray("fields");
            for (Object hiveField : hiveFields) {
                JSONObject field = (JSONObject) hiveField;
                hiveFieldsTemp.add(field);
                for (Object paramFiled : paramFields) {
                    JSONObject temp = (JSONObject) paramFiled;
                    if (temp.getString("field_name").equals(field.getString("field_name"))) {
                        hiveFieldsTemp.remove(hiveField);
                        temp.put("class_type",field.getString("class_type"));
                        hiveFieldsTemp.add(temp);
                    }
                }
            }
        } else {
            hiveFieldsTemp.addAll(hiveFields);
        }
        if (config.getBoolean("isDynamicPartition") != null) {
            hiveResource.put("isDynamicPartition", config.getBoolean("isDynamicPartition"));
        } else {
            hiveResource.put("isDynamicPartition", false);
        }
        hiveResource.put("fields", hiveFieldsTemp);
        return hiveResource;
    }

    /**
     * Builds the job-config JSON for a real-time collection task
     * (kafka source -> hive/kudu sink) identified by its menu id.
     * <p>
     * NOTE(review): {@code confConnectMapper.selectById} can return null for a
     * stale resourceId, which would NPE at {@code kafka.getKafkaUrl()} — confirm
     * the config is validated upstream.
     *
     * @param menuId menu id of the task
     * @return serialized job-config JSON consumed by the Flink submission layer
     */
    @Override
    public String wrapCollectionTask(Integer menuId) {
        JSONObject jobConfig = new JSONObject();
        RealTimeTaskVO taskVO = getByMenuId(menuId);
        JSONObject config = taskVO.getConfig();
        ConfConnect kafka = confConnectMapper.selectById(config.getInteger("resourceId"));
        // Source field list: config "params" entries become field_name/class_type pairs.
        JSONArray params = new JSONArray();
        if (config.getJSONArray("params") != null) {
            for (Object o : config.getJSONArray("params")) {
                JSONObject orig = (JSONObject) o;
                JSONObject param = new JSONObject();
                param.put("field_name", orig.getString("key"));
                param.put("class_type", orig.getString("value"));
                params.add(param);
            }
        }
        // Append the kafka bootstrap servers as an extra pseudo-field.
        JSONObject param = new JSONObject();
        param.put("field_name", "bootstrap.servers");
        param.put("class_type", kafka.getKafkaUrl());
        params.add(param);
        JSONObject kafkaResource = new JSONObject();
        kafkaResource.put("topic", config.getString("topicName"));
        kafkaResource.put("startUp", config.getString("startPosition"));
        kafkaResource.put("typename", "kafka");
        kafkaResource.put("format",  kafka.getFormat());
        kafkaResource.put("delimitFormat",  kafka.getDelimitId());
        kafkaResource.put("fields", params);
        Integer targetTableId = config.getInteger("targetTableId");
        JSONObject hiveResource = hiveSource(targetTableId, config);
        hiveResource.put("appendType", config.getString("writeWay"));
        jobConfig.put("resource", hiveResource);
        jobConfig.put("consumerByteRate",config.getInteger("writeLimit"));
        jobConfig.put("kafkaResource", kafkaResource);
        jobConfig.put("flinkJobName", taskVO.getId() + "_menuId_" + taskVO.getMenuId());
        return jobConfig.toJSONString();
    }

    /**
     * Batch-updates the given real-time tasks, presumably reassigning them to the
     * given user — verify the exact semantics against the mapper's SQL.
     *
     * @param realTimeTaskIds ids of the tasks to update
     * @param assigneePerId   user id applied to every task in the batch
     */
    @Override
    public void updateBatch(List<Integer> realTimeTaskIds, Integer assigneePerId) {
        realTimeTaskMapper.updateBatch(realTimeTaskIds,assigneePerId);
    }

    /**
     * Recursively walks the DAG from node {@code id} along the edges in
     * {@code unionMap}, translating each transform node (select / filter /
     * group_by / unique) into a JSON operator description appended to
     * {@code list}. Each step renames the current node's output table to
     * {@code temp_table_<n>} so the next operator reads from it. Recursion stops
     * at a node with no outgoing edge or a non-transform successor.
     *
     * @param nodeMap  node id -> node VO
     * @param unionMap edge map: from-node id -> to-node id
     * @param id       node whose successor is processed next
     * @param list     accumulator for the generated operator JSON objects
     * @param index    running operator index (also drives temp table naming)
     */
    private void nextNode(Map<Integer, FlinkTaskNodeVO> nodeMap, Map<Integer, Integer> unionMap,
                          Integer id, List<JSONObject> list, Integer index) {
        if (unionMap.get(id) != null) {
            FlinkTaskNodeVO preNode = nodeMap.get(id);
            FlinkTaskNodeVO currentNode = nodeMap.get(unionMap.get(id));
            if (currentNode == null) {
                return;
            }
            if (FlinkNodeType.select.getId().equals(currentNode.getNodeType()) ||
                    FlinkNodeType.filter.getId().equals(currentNode.getNodeType()) ||
                    FlinkNodeType.group_by.getId().equals(currentNode.getNodeType()) ||
                    FlinkNodeType.unique.getId().equals(currentNode.getNodeType())) {
                JSONObject node = new JSONObject();
                // Post-increment: "index" records the value before the bump, while
                // the temp table name below uses the incremented value.
                node.put("index", index++);
                node.put("oper_type", currentNode.getNodeType());
                if (FlinkNodeType.group_by.getId().equals(currentNode.getNodeType())) {
                    node.put("groupName", currentNode.getGroupBy());
                    node.put("isWindow", YesOrNoEnum.YES.getValue().equals(currentNode.getWindowsFlag()));
                    node.put("windowType", "TUMBLE");
                    node.put("unit", currentNode.getUnit());
                    node.put("interval", currentNode.getInterval());
                    node.put("windowName", currentNode.getWindowsFieldName());
                }
                if (FlinkNodeType.unique.getId().equals(currentNode.getNodeType())) {
                    node.put("partition", currentNode.getPartitionBy());
                    node.put("order", currentNode.getOrderBy());
                    node.put("alias_partition", "row_num");// alias defaults to row_num
                }
                node.put("tableName", preNode.getTableName());
                currentNode.setTableName("temp_table_" + index);
                node.put("outputTableName", currentNode.getTableName());
                node.put("fieldText", currentNode.getExpression());
                List<JSONObject> fieldList = Lists.newArrayList();
                for (FlinkNodeField field : currentNode.getFieldList()) {
                    JSONObject fieldJson = new JSONObject();
                    fieldJson.put("index", field.getFieldOrder());
                    fieldJson.put("field_name", field.getFieldName());
                    fieldJson.put("function", field.getFieldExpression());
                    // Fall back to the field name when no alias was configured.
                    String alias = StringUtils.isEmpty(field.getFieldAlias()) ?
                            field.getFieldName() : field.getFieldAlias();
                    fieldJson.put("alias", alias);
                    fieldJson.put("class_type", field.getFieldType());
                    fieldList.add(fieldJson);
                }
                node.put("fields", fieldList);
                list.add(node);
                nextNode(nodeMap, unionMap, currentNode.getId(), list, index);
            }
        }
    }

    /**
     * Loads the full detail of a real-time task by menu id: the base row with
     * lock state adjusted for the current user, and — for every task type
     * except plain FLINK jar tasks — its DAG nodes, per-node fields, node
     * links, cached front-end config, last check result and a preview of the
     * generated SQL.
     *
     * @param menuId menu id identifying the task
     * @return the populated task VO, or {@code null} when loading failed
     *         (the exception is logged, not rethrown)
     */
    @Override
    public RealTimeTaskVO getByMenuId(Integer menuId) {
        try {
            Long userId = ShiroUtils.getUserId();
            RealTimeTaskVO task = mapper.getByMenuId(menuId);
            // Locked by a different user: clear the lock flag for this viewer.
            // NOTE(review): presumably the UI uses lockState to toggle edit
            // mode — confirm against the front end.
            if (YesOrNoEnum.YES.getValue().equals(task.getLockState())
                    && task.getUpdatePer() != userId.intValue()) {
                task.setLockState(YesOrNoEnum.NO.getValue());
            }
            // A running task is also forced to the NO lock state.
            if (RunStatus.RUNNING.getStatus().equals(task.getRunState())) {
                task.setLockState(YesOrNoEnum.NO.getValue());
            }
            // userId -> username map, used to resolve node creator names below.
            List<SysUserDto> userList = sysUserService.listAllUsers();
            Map<Long, String> userMap = userList.stream().collect(
                    Collectors.toMap(SysUserDto::getUserId, SysUserDto::getUsername));
            // Jar (FLINK) tasks have no DAG; everything below applies to the
            // other task types only.
            if (!Constant.RealTimeTaskType.FLINK.getCode().equals(task.getTaskType())) {
                List<FlinkTaskNode> nodeList = flinkTaskNodeService.list(new QueryWrapper<FlinkTaskNode>()
                        .eq("task_id", menuId).orderBy(true, true, "create_time"));
                if (CollectionUtils.isNotEmpty(nodeList)) {
                    // Bulk-load all node fields once and group by node id,
                    // instead of one query per node.
                    List<FlinkNodeField> fieldList = flinkNodeFieldMapper.selectList(
                            new QueryWrapper<FlinkNodeField>().in("node_id", nodeList.stream()
                                    .map(FlinkTaskNode::getId).collect(Collectors.toList())));
                    Map<Integer, List<FlinkNodeField>> mapList = fieldList.stream()
                            .collect(Collectors.groupingBy(FlinkNodeField::getNodeId));
                    for (FlinkTaskNode flinkTaskNode : nodeList) {
                        FlinkTaskNodeVO node = new FlinkTaskNodeVO(flinkTaskNode);
                        node.setCreatePerName(userMap.get(flinkTaskNode.getCreatePer()));
                        if (mapList.get(flinkTaskNode.getId()) != null) {
                            node.setFieldList(mapList.get(flinkTaskNode.getId()));
                        }
                        task.getNodeList().add(node);
                    }
                }
                // Front-end cached canvas config; malformed JSON is tolerated
                // and only logged.
                String configStr = mapper.getConfigStr(menuId);
                if (StringUtils.isNotEmpty(configStr)) {
                    try {
                        JSONObject configJSON = JSONObject.parseObject(configStr);
                        task.setConfig(configJSON);
                    } catch (Exception e) {
                        log.warn("前段缓存配置出错{}", configStr);
                    }
                }
                // Last source-check result, exposed as parsed JSON when valid.
                if (StringUtils.isNotEmpty(task.getCheckResult())) {
                    try {
                        JSONObject json = JSONObject.parseObject(task.getCheckResult());
                        task.setCheckResultJson(json);
                    } catch (Exception e) {
                        log.warn("查看结果错误{}", task.getCheckResult());
                    }
                }
                // Node-to-node connections (the DAG edges).
                List<FlinkNodeUnion> unionList = flinkNodeUnionMapper.selectList(
                        new QueryWrapper<FlinkNodeUnion>().eq("task_id", menuId));
                if (CollectionUtils.isNotEmpty(unionList)) {
                    task.setLineList(unionList);
                }
                // Assemble a human-readable SQL preview from the DAG.
                JSONObject sqlJson = new JSONObject();
                StringBuilder previewSql = wrapSql(task, sqlJson, YesOrNoEnum.NO.getValue());
                task.setAllSql(previewSql.toString());
            }
            return task;
        } catch (Exception e) {
            log.error("获取实时任务详情出错：", e);
        }
        return null;
    }

    /**
     * Updates a real-time task row keyed by its menu id.
     *
     * @param task fields to update; menuId must be set
     * @return number of rows affected
     */
    @Override
    public int updateByMenuId(RealTimeTask task) {
        return mapper.updateByMenuId(task);
    }

    // Permission lookup used by writePermissionCheck; declared mid-class,
    // next to its only call site in this file.
    @Autowired
    private DataPermissionService dataPermissionService;

    /**
     * Checks that the current user may write to every sink table referenced by
     * the task's output nodes (kafka / kudu / hive / mysql).
     *
     * <p>Only kafka sinks carry a table-level permission check here; the other
     * sink types are only counted to ensure at least one output exists.
     *
     * @param task task whose node list is inspected; an empty node list passes
     * @return an error {@link R} describing the first violation found, or
     *         {@code null} when the check passes
     */
    private R writePermissionCheck(RealTimeTaskVO task) {
        if (CollectionUtils.isEmpty(task.getNodeList())) {
            return null;
        }
        List<FlinkTaskNodeVO> sinkNodes = task.getNodeList().stream().filter(s ->
                FlinkNodeType.kafka.getId().equals(s.getNodeType())
                        || FlinkNodeType.kudu.getId().equals(s.getNodeType())
                        || FlinkNodeType.hive.getId().equals(s.getNodeType())
                        || FlinkNodeType.mysql.getId().equals(s.getNodeType()))
                .collect(Collectors.toList());
        if (sinkNodes.isEmpty()) {
            return R.error("缺少输出节点");
        }
        // Loop-invariant: the current user cannot change between iterations,
        // so fetch it once instead of once per kafka node.
        Long userId = ShiroUtils.getUserId();
        for (FlinkTaskNodeVO node : sinkNodes) {
            if (node.getConfig() == null) {
                continue;
            }
            Integer tableId = node.getConfig().getInteger("tableId");
            // Only kafka sinks with a configured table are permission-checked.
            if (tableId == null || !FlinkNodeType.kafka.getId().equals(node.getNodeType())) {
                continue;
            }
            BdpRealTableInfo realTable = realTableInfoMapper.getTableById(tableId);
            if (realTable == null) {
                return R.error("配置的表已经不存在");
            }
            // Owner always passes; otherwise an INNER table requires an
            // explicit WRITE grant.
            if (userId != null && !userId.equals(realTable.getCreateUser())
                    && !dataPermissionService.checkDataPermission(tableId,
                    PermissionTypeEnum.REAL_TABLE.getType(), PermissionTypeEnum.WRITE.getType())
                    && RealTableType.INNER.getCode().equals(realTable.getTableType())) {
                return R.error("当前用户无表" + realTable.getTableName() + "的写入权限");
            }
        }
        return null;
    }
    
    /**
     * Validates that a task is in a submittable state and that its
     * configuration is complete for its task type (jar, SQL, SQL-DAG or
     * real-time collection).
     *
     * @param task task to validate; may be {@code null}
     * @return an error {@link R} describing the first problem found, or
     *         {@code null} when the task may be submitted
     */
    private R verificationParam(RealTimeTaskVO task) {
        if (task == null) {
            return R.error("任务不存在");
        }
        // Constant-first equals avoids an NPE when runState is null
        // (consistent with the rest of this class).
        if (RunStatus.RUNNING.getStatus().equals(task.getRunState())) {
            return R.error("任务正在运行中！");
        }
        // A status-polling pool for this task means a submit is in flight.
        if (poolMap.containsKey(task.getMenuId())) {
            return R.error("任务正在提交中，稍后尝试，或先停止任务再尝试");
        }
        if (Constant.RealTimeTaskType.FLINK.getCode().equals(task.getTaskType())) {
            // Plain jar task: needs a resource (jar) and a main class.
            if (task.getResourceId() == null || StringUtils.isEmpty(task.getMainFunction())) {
                return R.error("任务main函数不能为空，资源不能为空");
            }
        } else if (Constant.RealTimeTaskType.FLINK_SQL.getCode().equals(task.getTaskType())) {
            if (StringUtils.isEmpty(task.getFlinkSQL())) {
                return R.error("sql不能为空");
            }
            // Strip newlines so a whitespace-only script is detected as empty
            // (replace() is a no-op when no newline is present).
            String runSQL = SqlParseUtil.wrapSql(task.getFlinkSQL()).replace("\n", "");
            if (StringUtils.isEmpty(runSQL)) {
                return R.error("sql不能为空");
            }
            if (CollectionUtils.isEmpty(task.getNodeList())) {
                return R.error("输入输出不能为空");
            }
            long sourceNum = task.getNodeList().stream().filter(s ->
                    FlinkNodeType.source.getId().equals(s.getNodeType())).count();
            if (sourceNum < 1) {
                return R.error("缺少输入节点");
            }
            return writePermissionCheck(task);
        } else if (Constant.RealTimeTaskType.FLINK_SQL_DAG.getCode().equals(task.getTaskType())) {
            if (CollectionUtils.isEmpty(task.getNodeList())) {
                return R.error("任务节点不能为空");
            }
            if (CollectionUtils.isEmpty(task.getLineList())) {
                return R.error("任务节点关联关系不能为空");
            }
            long sourceNum = task.getNodeList().stream().filter(s ->
                    FlinkNodeType.source.getId().equals(s.getNodeType())).count();
            if (sourceNum < 1) {
                return R.error("缺少数据源节点");
            }
            // NOTE(review): mysql sinks are not counted here, unlike
            // writePermissionCheck — confirm whether that is intentional.
            long sinkNum = task.getNodeList().stream().filter(s ->
                    FlinkNodeType.kafka.getId().equals(s.getNodeType())
                            || FlinkNodeType.kudu.getId().equals(s.getNodeType())
                            || FlinkNodeType.hive.getId().equals(s.getNodeType())).count();
            if (sinkNum < 1) {
                return R.error("缺少sink节点");
            }
            // Re-generate the SQL from the DAG and verify every intermediate
            // node produced exactly one statement.
            JSONObject sqlJson = new JSONObject();
            StringBuilder previewSql = wrapSql(task, sqlJson, 1);
            if (!sqlJson.containsKey("flinkSqls") ||
                    sqlJson.getJSONArray("flinkSqls") == null) {
                return R.error("sql 信息不完整，确认节点信息是否连接完整 \n" + previewSql.toString());
            }
            if (sqlJson.getJSONArray("flinkSqls").size() != (task.getNodeList().size() - sourceNum - sinkNum)) {
                return R.error("sql 解析错误，确认节点信息是否连接完整 \n" + previewSql.toString());
            }
            return writePermissionCheck(task);
        } else if (Constant.RealTimeTaskType.REAL_TIME_COLLECTION.getCode().equals(task.getTaskType())) {
            JSONObject config = task.getConfig();
            if (config == null) {
                return R.error("缺少任务配置信息");
            }
            if (config.getInteger("resourceId") == null) {
                return R.error("缺少资源配置信息");
            }
            if (config.getInteger("targetTableId") == null) {
                return R.error("缺少hive表配置信息");
            }
        }
        return null;
    }

    /**
     * Runs the task locally (preview mode) and records the produced log file
     * and RUNNING state on the task row.
     *
     * @param menuId menu id of the task to run
     * @return the validation error, or the refreshed task in an ok result
     */
    @Override
    public R localRun(Integer menuId) {
        RealTimeTaskVO task = getByMenuId(menuId);
        R invalid = verificationParam(task);
        if (invalid != null) {
            return invalid;
        }
        String logfile = fLinkService.localRun(task).get("logfile");
        // Persist run metadata: who started it, when, and where its log lives.
        RealTimeTask update = new RealTimeTask();
        update.setMenuId(menuId);
        update.setRunTime(new Date());
        update.setUpdatePer(ShiroUtils.getUserId().intValue());
        update.setJobId(logfile);
        update.setApplicationId(null);
        update.setRunState(RunStatus.RUNNING.getStatus());
        mapper.updateByMenuId(update);
        return R.okWithData(mapper.getByMenuId(menuId));
    }

    /**
     * Submits a task to the cluster and starts a 5-second polling loop that
     * resolves the application id from the submit log, tracks the run state,
     * and records start/end times in the run history.
     *
     * @param menuId menu id of the task to run
     * @return the validation error, or the refreshed task in an ok result
     * @throws Exception if the underlying submit fails
     */
    @Override
    public R run(Integer menuId) throws Exception {
        RealTimeTaskVO task = getByMenuId(menuId);
        R r = verificationParam(task);
        if (r != null) {
            return r;
        }
        // Record the RUN operation in the audit log.
        RealTimeTaskLog taskLog = new RealTimeTaskLog();
        taskLog.setTaskId(menuId);
        taskLog.setType(Constant.RealTimeTaskLogType.RUN.getCode());
        Integer userId = ShiroUtils.getUserId().intValue();
        taskLog.setOperationPer(userId);
        realTimeTaskLogService.save(taskLog);

        Date now = new Date();

        // Open a run-history record; updated again when the state settles.
        BdpRealTimeRunHistory history = new BdpRealTimeRunHistory();
        history.setTaskId(task.getId());
        history.setRunState(task.getRunState());
        bdpRealTimeRunHistoryMapper.insertOrUpdateSelective(history);
        RealTimeTask testTemp = new RealTimeTask();
        testTemp.setRunTime(now);
        testTemp.setMenuId(menuId);
        testTemp.setRunLogId(history.getId());
        testTemp.setUpdatePer(userId);
        mapper.updateByMenuId(testTemp);

        Map<String, String> runResult = fLinkService.run(task);
        HashMap<Integer, String> applicationIdMap = Maps.newHashMap();
        HashMap<Integer, Integer> applicationNum = Maps.newHashMap(); // poll-attempt counter per task

        // Persist the submit log file path on the task row.
        String logfile = runResult.get("logfile");
        RealTimeTask statusTaskLog = new RealTimeTask();
        statusTaskLog.setMenuId(menuId);
        statusTaskLog.setJobId(logfile);
        mapper.updateByMenuId(statusTaskLog);

        ScheduledExecutorService pool = Executors.newScheduledThreadPool(1);

        pool.scheduleAtFixedRate(() -> {
            RealTimeTask statusTask = new RealTimeTask();
            String applicationId = applicationIdMap.get(menuId);
            // The literal string "applicationId" is a sentinel meaning the log
            // yielded something that was not a real id — retry extraction.
            if ("applicationId".equals(applicationId) || StringUtils.isEmpty(applicationId)) {
                applicationId = fLinkService.getAppId(logfile);
                if (StringUtils.isNotEmpty(applicationId) &&
                        !applicationId.startsWith("application_")) {
                    log.info("menuId:{},sppId解析错误:\n 日志内容:{}", menuId, logfile);
                    applicationId = "applicationId";
                }
                applicationIdMap.put(menuId, applicationId);
            }
            Integer num = applicationNum.get(menuId);
            log.info("重试次数{}", num);
            if (num == null) {
                num = 0;
            }
            applicationNum.put(menuId, ++num);
            // Only query cluster status once a real application id is known.
            if (StringUtils.isNotEmpty(applicationId) &&
                    applicationId.startsWith("application_") &&
                    !"applicationId".equals(applicationId)) {
                Integer status = realTimeTaskOpsService.getStatus(applicationId);
                // The job may still be in the submit phase at first.
                statusTask.setMenuId(menuId);
                statusTask.setJobId(logfile);
                if (applicationId.contains("\n")) {
                    applicationId = applicationId.replaceAll("\n", "");
                }
                statusTask.setApplicationId(applicationId);
                statusTask.setRunState(status);
                mapper.updateByMenuId(statusTask);
                // Terminal states: close the history, stop polling, and run
                // the alarm check for this outcome.
                if (RunStatus.FAIL.getStatus().equals(status) ||
                        RunStatus.SUCCESS.getStatus().equals(status) ||
                        RunStatus.KILLED.getStatus().equals(status)) {
                    history.setRunState(status);
                    history.setEndTime(new Date());
                    bdpRealTimeRunHistoryMapper.insertOrUpdateSelective(history);
                    pool.shutdown();
                    poolMap.remove(menuId);
                    iBdpRealTaskAlarmService.cheackIsAlarmAndPushMsg(menuId,status);
                }
                // RUNNING: record the start time and stop polling as well.
                if (RunStatus.RUNNING.getStatus().equals(status)) {
                    history.setStartTime(now);
                    history.setRunState(status);
                    bdpRealTimeRunHistoryMapper.insertOrUpdateSelective(history);
                    pool.shutdown();
                    poolMap.remove(menuId);
                }
            }
            // Give up after 24 polls (~2 minutes) without resolution.
            if (num > 24) {
                log.info("menuId:{},任务提交失败\n", menuId);
                pool.shutdown();
                poolMap.remove(menuId);
            }

        }, 2, 5, TimeUnit.SECONDS); // first poll after 2s, then every 5s
        poolMap.put(menuId, pool);
        return R.okWithData(mapper.getByMenuId(menuId));
    }

    /**
     * Restarts a task: cancels the current run, then resubmits it on a
     * background thread so the caller returns immediately.
     *
     * @param menuId menu id of the task to rerun
     * @return the validation error, or ok once the resubmit is scheduled
     * @throws Exception if cancelling the current run fails
     */
    @Override
    public R reRun(Integer menuId) throws Exception {
        R r = verificationParam(getByMenuId(menuId));
        if (r != null) {
            return r;
        }
        cancel(menuId);
        ExecutorService pool = Executors.newSingleThreadExecutor();
        pool.execute(() -> {
            try {
                run(menuId);
            } catch (Exception e) {
                log.error("重跑失败", e);
            }
        });
        // Fix: the executor was never shut down, leaking one thread per rerun.
        // shutdown() still lets the already-submitted task run to completion.
        pool.shutdown();
        return R.ok();
    }

    /**
     * Stops a task: shuts down its status-polling thread, cancels the job when
     * an applicationId exists, marks the task KILLED, closes the open
     * run-history record, and writes a STOP audit entry.
     *
     * @param menuId menu id of the task to stop
     * @return ok result
     * @throws Exception if cancelling the job fails
     */
    @Override
    public R cancel(Integer menuId) throws Exception {
        RealTimeTaskVO task = mapper.getByMenuId(menuId);
        // Stop the status-polling loop for this task, if one is active.
        if (poolMap.get(menuId) != null) {
            poolMap.get(menuId).shutdown();
            poolMap.remove(menuId);
        }
        if (task.getApplicationId() != null) {
            fLinkService.cancel(task);
        }
        // Mark the task row as killed.
        RealTimeTask updateEdit = new RealTimeTask();
        updateEdit.setMenuId(menuId);
        updateEdit.setRunState(RunStatus.KILLED.getStatus());
        mapper.updateByMenuId(updateEdit);
        // Close the open run-history record, if any.
        if (task.getRunLogId() != null) {
            BdpRealTimeRunHistory history = new BdpRealTimeRunHistory();
            history.setId(task.getRunLogId());
            history.setEndTime(new Date());
            history.setRunState(RunStatus.KILLED.getStatus());
            bdpRealTimeRunHistoryMapper.updateById(history);
        }
        // Audit entry. Renamed from "log", which shadowed the @Slf4j logger
        // field; now consistent with taskLog in run().
        RealTimeTaskLog taskLog = new RealTimeTaskLog();
        taskLog.setTaskId(menuId);
        taskLog.setType(Constant.RealTimeTaskLogType.STOP.getCode());
        taskLog.setOperationPer(ShiroUtils.getUserId().intValue());
        realTimeTaskLogService.save(taskLog);
        return R.ok();
    }

    /**
     * Pauses a running task via the Flink service.
     *
     * @param menuId menu id of the task to pause
     * @return error when the task is not running, has no applicationId, or the
     *         pause call reports a failure; ok otherwise
     */
    @Override
    public R pause(Integer menuId) {
        RealTimeTaskVO task = mapper.getByMenuId(menuId);
        if (!RunStatus.RUNNING.getStatus().equals(task.getRunState())) {
            return R.error("任务未运行，无法暂停");
        }
        if (StringUtils.isEmpty(task.getApplicationId())) {
            // Message fixed: previous text was garbled ("任务未不存在…无法暂定").
            return R.error("任务不存在applicationId，无法暂停");
        }
        // Stop the status-polling loop for this task, if one is active.
        if (poolMap.get(menuId) != null) {
            poolMap.get(menuId).shutdown();
            poolMap.remove(menuId);
        }
        String msg = fLinkService.pause(task);
        if (StringUtils.isNotEmpty(msg)) {
            return R.error(msg);
        }
        // Audit entry. Renamed from "log", which shadowed the @Slf4j logger.
        // NOTE(review): logged as STOP — no PAUSE log type is visible in this
        // file; confirm whether a dedicated code should exist.
        RealTimeTaskLog taskLog = new RealTimeTaskLog();
        taskLog.setTaskId(menuId);
        taskLog.setType(Constant.RealTimeTaskLogType.STOP.getCode());
        taskLog.setOperationPer(ShiroUtils.getUserId().intValue());
        realTimeTaskLogService.save(taskLog);
        return R.ok();
    }

    /**
     * Validates the kafka source tables of a task by sampling one live message
     * per source topic and comparing it with the table's declared fields.
     *
     * <p>Aggregate check code persisted via {@code mapper.updateCheck}:
     * 0 = nothing checked, 1 = all sampled sources valid, 2 = at least one
     * source failed or could not be validated.
     *
     * @param menuId menu id of the task
     * @return the aggregate check code in an {@link R}, or an error when the
     *         task has no usable source nodes
     */
    @Override
    public R checkSource(Integer menuId) {
        RealTimeTaskVO task = mapper.getByMenuId(menuId);
        // Only source-type nodes feed data in; other node types are ignored.
        List<FlinkTaskNode> nodeList = flinkTaskNodeService.list(new QueryWrapper<FlinkTaskNode>()
                .eq("node_type",FlinkNodeType.source.getId())
                .eq("task_id", menuId).orderBy(true, true, "create_time"));
        JSONObject checkResult = new JSONObject();
        Integer check = 0;
        if (CollectionUtils.isNotEmpty(nodeList)) {
            for (FlinkTaskNode node : nodeList) {
                    if (StringUtils.isNotEmpty(node.getNodeParam())) {
                        // Node params are a JSON string; a parse failure just
                        // leaves config empty (tableId then resolves to null).
                        JSONObject config = new JSONObject();
                        try {
                            config = JSONObject.parseObject(node.getNodeParam());
                        } catch (Exception ignored) {
                        }
                        Integer tableId = config.getInteger("tableId");
                        if (tableId != null) {
                            BdpRealTableInfo realTable = realTableInfoMapper.getTableById(tableId);
                            if (realTable != null) {
                                // text-format topics carry no schema to check.
                                if (DataFormat.text.getName().equals(realTable.getFormat())) {
                                    checkResult.put(realTable.getTableName(), "格式为text,无法校验");
                                    check = 2;
                                    continue;
                                }
                                // External tables use their own kafka cluster.
                                // NOTE(review): kafka may be null if the
                                // connection row was deleted — NPE risk here.
                                String kurl = kafkaUrl;
                                String topic = realTable.getTopicName();
                                if (RealTableType.OUTER.getCode().equals(realTable.getTableType())) {
                                    ConfConnect kafka = confConnectMapper.selectById(realTable.getResourceId());
                                    kurl = kafka.getKafkaUrl();
                                }
                                // Sample a single message from the topic.
                                List<Object> dataList = LocalKafkaUtil.viewData(kurl, topic, "json", 1);
                                if (CollectionUtils.isNotEmpty(dataList)) {
                                    checkResult.put(realTable.getTableName(), "校验成功");
                                    // 1 = ok so far; never downgrade an
                                    // earlier failure (2).
                                    if (check != 2) {
                                        check = 1;
                                    }
                                    JSONObject json = null;
                                    try {
                                        json = (JSONObject) dataList.get(0) ;
                                        // Every declared field must appear in
                                        // the sampled message...
                                        List<BdpRealTableField> fields = bdpRealTableFieldMapper.selectListByTableIds(Collections.singleton(tableId));
                                        for (BdpRealTableField field : fields) {
                                            if (!json.keySet().contains(field.getFieldName())) {
                                                log.info("field:{}",json.keySet());
                                                checkResult.put(realTable.getTableName(), "校验失败，字段" + field.getFieldName() + "在source中不存在");
                                                check = 2;
                                                break;
                                            }
                                        }
                                        // ...and no declared field may be null.
                                        List<String> fieldNames = fields.stream().map(BdpRealTableField::getFieldName).collect(Collectors.toList());
                                        for (String s : json.keySet()) {
                                            if (fieldNames.contains(s) && json.get(s) == null) {
                                                checkResult.put(realTable.getTableName(), "校验失败，" + s + "值为空");
                                                check = 2;
                                            }
                                        }
                                    } catch (Exception e) {
                                        // The cast to JSONObject failed: the
                                        // sampled message is not JSON.
                                        checkResult.put(realTable.getTableName(), "校验失败，数据非json格式");
                                        check = 2;
                                    }
                                }else{
                                    checkResult.put(realTable.getTableName(), "无数据，无法检测");
                                    check = 2;
                                }
                            }
                        } else {
                            checkResult.put(node.getNodeName(), "输入表信息不存在,无法校验");
                            check = 2;
                        }
                } else {
                        // A source node without params aborts the whole check
                        // immediately (result persisted before returning).
                        check = 2;
                        checkResult.put(node.getNodeName(), "输入表信息不存在,无法校验");
                        mapper.updateCheck(check, checkResult.toJSONString(), menuId);
                        return R.error("非kafka输入表，无法检测");
                    }
            }
        } else {
            check = 2;
            checkResult.put(task.getTaskName(), "输入表信息不存在,无法校验");
            mapper.updateCheck(check, checkResult.toJSONString(), menuId);
            return R.error("无输入信息，无法检测");
        }
        mapper.updateCheck(check, checkResult.toJSONString(), menuId);
        return new R().put("check", check);
    }
}
