package com.sh.data.engine.domain.datadev.flinkTaskAboutExexte;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.google.common.collect.Lists;
import com.sh.data.engine.common.enumDefinition.DSType;
import com.sh.data.engine.common.enumDefinition.FlinkJobStatusEnum;
import com.sh.data.engine.common.enumDefinition.FlinkNodeTypeEnum;
import com.sh.data.engine.common.exception.BusinessException;
import com.sh.data.engine.common.util.DateUtil;
import com.sh.data.engine.common.util.FlinkUtil;
import com.sh.data.engine.domain.datadev.flink.model.domain.*;
import com.sh.data.engine.domain.datadev.flink.model.param.StartFlinkTaskParam;
import com.sh.data.engine.domain.datadev.flink.service.*;
import com.sh.data.engine.domain.integration.datasource.model.domain.DataSourceDetailDomain;
import com.sh.data.engine.domain.integration.datasource.service.DataSourceService;
import com.sh.data.engine.domain.util.ConvertUtil;
import com.sh.data.engine.domain.util.JDBCUtil;
import com.sh.data.engine.domain.util.LogUtil;
import com.sh.data.engine.repository.dao.datadev.entity.FlinkMenuNodeEntity;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.buildobjects.process.TimeoutException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

@Service
@Slf4j
public class FlinkSqlExcService {
    // Marker emitted around dialect-switching statements in generated hive-sink SQL
    // (see convertSinkDomainToSql); presumably used to split the script into per-dialect
    // sections before submission — TODO confirm against the consumer of the SQL file.
    private static final String ESCAPE_STR = "_HUFU_FLINK_SQL_ESCAPE_";
    private static final int ESCAPE_STR_LEN = ESCAPE_STR.length();

    // Job lifecycle persistence (create/update/lookup of engine job records).
    @Autowired
    private FlinkJobService flinkJobService;

    // Access to the active SQL task definition of a node.
    @Autowired
    private FlinkSqlTaskService flinkSqlTaskService;

    // @Lazy breaks a circular dependency between this service and the task API service.
    @Lazy
    @Autowired
    private FlinkTaskApiService flinkTaskApiService;

    // Resolves datasource connection details (JDBC links, hive config, credentials).
    @Autowired
    private DataSourceService dataSourceService;

    @Autowired
    private FlinkNodeService flinkNodeService;

    @Autowired
    private FlinkFunctionService flinkFunctionService;

    @Autowired
    private FlinkResourceService flinkResourceService;

    // Directory where generated per-node .sql files are written.
    @Value("${flink.task.sql.filepath:/data/hufu_file_storage/flinksql}")
    private String sqlDirPath;

    // Directory where per-node flink submission logs are written.
    @Value("${flink.log.path:/data/hufu_file_storage/log/flink}")
    private String flinkLogDir;

    // execType 0 debug模式
    public Long clientJobStart(StartFlinkTaskParam startFlinkTaskParam) {
        Long nodeId = startFlinkTaskParam.getNodeId();
        final Integer execType = startFlinkTaskParam.getExecType();
        String savePoint = startFlinkTaskParam.getSavePoint();

        // nodeId_execType
        String logFileName = String.format("%s_%s", nodeId, execType);
        String logFilePath = String.format("%s/%s.log", flinkLogDir, logFileName);

        final FlinkJobDomain flinkJobDomain = flinkJobService.getByNodeId(nodeId, execType);
        if (null != flinkJobDomain
            && Objects.equals(flinkJobDomain.getStatus(), FlinkJobStatusEnum.RUNNING.getCode())) {
            return flinkJobDomain.getId();
        }

        // 先删除旧日志
        FileUtils.deleteQuietly(new File(logFilePath));

        if (nodeId == null) {
            throw new IllegalArgumentException("nodeId can't be null");
        }

        FlinkSqlTaskDomain flinkSqlTaskDomain = flinkSqlTaskService.getActiveByNodeId(nodeId);

        if (null == flinkSqlTaskDomain) {
            throw new BusinessException("active task don't exist");
        }

        final Long jobId =
            flinkJobService.addJob(
                flinkSqlTaskDomain.getProjectId(),
                nodeId,
                flinkSqlTaskDomain.getId(),
                FlinkNodeTypeEnum.TASK_SQL.getCode(),
                execType,
                Objects.equals(execType, 0) ? null : flinkSqlTaskDomain.getNextExecTime());

        try {
            // 将产生的sql写到文件中
            String sqlFilePath = String.format("%s/%s/%s_%s.sql", sqlDirPath, nodeId, nodeId, execType);

            String scriptContent = flinkSqlTaskDomain.getScriptContent();
            List<FlinkSourceConfigDomain> sourceConfigDomainList =
                flinkSqlTaskDomain.getSourceConfigDomainList();
            List<FlinkSinkConfigDomain> sinkConfigDomainList =
                flinkSqlTaskDomain.getSinkConfigDomainList();
            final List<FlinkDimConfigDomain> dimConfigDomainList =
                flinkSqlTaskDomain.getDimConfigDomainList();
            final String envConfig = flinkSqlTaskDomain.getEnvConfig();

            // 调试模式，将原source改为fileSource，将原sink改为fileSink
            if (Objects.equals(execType, 0)) {
                if (CollectionUtils.isNotEmpty(sourceConfigDomainList)) {
                    List<FlinkSourceConfigDomain> tempSourceConfigList = Lists.newLinkedList();

                    for (FlinkSourceConfigDomain flinkSourceConfigDomain : sourceConfigDomainList) {
                        FlinkFileSourceConfigDomain fileSourceConfigDomain =
                            ConvertUtil.copyProperties(
                                flinkSourceConfigDomain, FlinkFileSourceConfigDomain.class);

                        // String csvFilePath = String.format("%s/user/flink/debug/%s/source", hdfsUrl, nodeId);
                        String csvFilePath =
                            String.format("/data/hufu_file_storage/flink_csv/debug/%s/source", nodeId);

                        fileSourceConfigDomain.setDsType(DSType.File.name());
                        fileSourceConfigDomain.setFilePath(csvFilePath);
                        fileSourceConfigDomain.setFileFormat("csv");
                        tempSourceConfigList.add(fileSourceConfigDomain);
                    }
                    sourceConfigDomainList = tempSourceConfigList;
                }

                if (CollectionUtils.isNotEmpty(sinkConfigDomainList)) {
                    List<FlinkSinkConfigDomain> tempSinkConfigList = Lists.newLinkedList();
                    for (FlinkSinkConfigDomain flinkSinkConfigDomain : sinkConfigDomainList) {
                        FlinkFileSinkConfigDomain fileSinkConfigDomain =
                            ConvertUtil.copyProperties(flinkSinkConfigDomain, FlinkFileSinkConfigDomain.class);

                        // String csvFilePath = String.format("%s/user/flink/debug/%s/sink", hdfsUrl, nodeId);
                        String csvFilePath =
                            String.format("/data/hufu_file_storage/flink_csv/debug/%s/sink", nodeId);

                        fileSinkConfigDomain.setDsType(DSType.File.name());
                        fileSinkConfigDomain.setFilePath(csvFilePath);
                        fileSinkConfigDomain.setFileFormat("csv");
                        tempSinkConfigList.add(fileSinkConfigDomain);
                    }
                    sinkConfigDomainList = tempSinkConfigList;
                }
            }

            // 自定义在外部jar的java函数对应的函数生成的sql
            final List<String> functionSqls =
                convertFunctionToSql(flinkSqlTaskDomain.getProjectId(), scriptContent);
            final List<String> extFunctionJarPathList =
                getExtFunctionJarPathList(flinkSqlTaskDomain.getProjectId(), scriptContent);
            final List<String> sourceSqlList = convertSourceDomainToSql(sourceConfigDomainList, jobId);
            final List<String> sinkSqlList = convertSinkDomainToSql(sinkConfigDomainList);
            final List<String> dimSqlList = convertDimDomainToSql(dimConfigDomainList);

            File sqlFile = new File(sqlFilePath);
            if (sqlFile.exists()) {
                sqlFile.delete();
            }

            writeToFile(sqlFile, functionSqls, sourceSqlList, sinkSqlList, dimSqlList);

            // 要是使用了hive sink,那么sql中要写完整的表名
            scriptContent =
                scriptContent
                    .replace("${hive_sink_catlog_name}", "`" + hiveSinkCatlogName + "`")
                    .replace("${hive_sink_db_name}", "`" + hiveSinkDbName + "`")
                    .replace("$ { hive_sink_catlog_name }", "`" + hiveSinkCatlogName + "`")
                    .replace("$ { hive_sink_db_name }", "`" + hiveSinkDbName + "`");

            writeToFile(sqlFile, scriptContent);
            List<String> logList = Lists.newArrayList();
            logList.add("任务调度中");
            logList.add("SOURCE:" + sourceSqlList);
            logList.add("SINK:" + sinkSqlList);
            logList.add("CONTENT:" + scriptContent);
            Thread thread =
                new Thread(
                    () -> {
                        try {
                            final FlinkUtil.StartJobParam startJobParam = new FlinkUtil.StartJobParam();
                            startJobParam.setEngineJobId(jobId);
                            startJobParam.setSqlFile(sqlFilePath);
                            startJobParam.setDebug(Objects.equals(0, execType));
                            startJobParam.setSavePoint(savePoint);

                            Properties properties = FlinkUtil.getDefaultProperties();
                            if (StringUtils.isNotBlank(envConfig)) {
                                properties.load(new StringReader(envConfig));
                                startJobParam.setExtProperties(properties);
                            }

                            if (!extFunctionJarPathList.isEmpty()) {
                                startJobParam.setExtJarList(extFunctionJarPathList);
                            }

                            final FlinkUtil.FlinkJobResult flinkJobResult = FlinkUtil.startJob(startJobParam);

                            flinkJobService.updateJob(
                                flinkJobResult.getEngineJobId(),
                                flinkJobResult.getFlinkJobIds(),
                                flinkJobResult.getYarnApplicationIds());
                            if (CollectionUtils.isNotEmpty(flinkJobResult.getFlinkJobIds())) {
                                logList.addAll(flinkJobResult.getLogList());
                                logList.add("任务已提交到flink..");
                            }
                        } catch (Exception e) {
                            log.error(e.getMessage(), e);
                            if (e instanceof TimeoutException) {
                                logList.add(
                                    String.format(
                                        "[%s] Deployment took more than 60 seconds. Please check if the requested resources are available in the YARN cluster",
                                        DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss.SSS")));
                                flinkTaskApiService.stopFlinkTask(
                                    jobId, FlinkJobStatusEnum.PAUSED.getCode(), false);
                            } else {
                                flinkTaskApiService.stopFlinkTask(
                                    jobId, FlinkJobStatusEnum.FAILURE.getCode(), false);
                                logList.add(
                                    String.format(
                                        "[%s] %s",
                                        DateFormatUtils.format(new Date(), "yyyy-MM-dd HH:mm:ss.SSS"),
                                        ExceptionUtils.getFullStackTrace(e)));
                            }
                        }
                        try {
                            // 调试异常才会写入日志
                            LogUtil.writeFlinkLog(nodeId, logList);
                        } catch (IOException ex) {
                            log.error(ex.getMessage(), ex);
                        }
                    });
            thread.start();

        } catch (Exception e) {
            flinkTaskApiService.stopFlinkTask(jobId, FlinkJobStatusEnum.FAILURE.getCode(), false);
            log.error(e.getMessage(), e);
        }

        return jobId;
    }

    /**
     * Generates the DDL for dimension (lookup) tables.
     *
     * <p>Hive dimensions become a hive catalog plus a temporary view over the hive table;
     * MySQL/Oracle dimensions become a JDBC lookup table with cache options. Any other
     * ds type falls through the switch and contributes an empty string (behavior kept).
     *
     * @param flinkDimConfigDomainList dimension configs of the task, may be null/empty
     * @return one SQL string per dimension config (empty list when there are none)
     * @throws BusinessException when a hive dimension has no hive-site.xml configured
     */
    private List<String> convertDimDomainToSql(List<FlinkDimConfigDomain> flinkDimConfigDomainList) {
        if (CollectionUtils.isEmpty(flinkDimConfigDomainList)) {
            return Collections.emptyList();
        }

        List<String> sqls = Lists.newLinkedList();

        for (FlinkDimConfigDomain flinkDimConfigDomain : flinkDimConfigDomainList) {
            final String dsType = flinkDimConfigDomain.getDsType();
            final DSType dsTypeEnum = DSType.valueOf(dsType);

            final DataSourceDetailDomain dataSourceDetailById =
                dataSourceService.getDataSourceDetailById(flinkDimConfigDomain.getDsId(), false);
            String mappingTableName = flinkDimConfigDomain.getMappingTableName();

            StringBuilder sqlBuilder = new StringBuilder();

            switch (dsTypeEnum) {
                case Hive: {
                    String hiveSiteAddress =
                        Objects.nonNull(dataSourceDetailById.getHiveConfig())
                            ? dataSourceDetailById.getHiveConfig().getHiveSiteAddress()
                            : "";

                    // BUGFIX: the previous Objects.nonNull(hiveSiteAddress) check was always
                    // true (the fallback above is "" rather than null), so a missing config
                    // fell into substring(0, lastIndexOf("/")) == substring(0, -1) and threw
                    // StringIndexOutOfBoundsException instead of the intended business error.
                    if (StringUtils.isNotBlank(hiveSiteAddress)) {
                        // Strip the trailing file name: flink expects the conf DIRECTORY.
                        int endIndex = hiveSiteAddress.lastIndexOf("/");
                        if (endIndex >= 0) {
                            hiveSiteAddress = hiveSiteAddress.substring(0, endIndex);
                        }
                    } else {
                        throw new BusinessException("未配置hive-site.xml");
                    }
                    // hive db
                    String dbName = dataSourceDetailById.getDbName();
                    String hiveCatalogName = "hiveCatalog";

                    FlinkHiveDimConfigDomain flinkHiveDimConfigDomain =
                        (FlinkHiveDimConfigDomain) flinkDimConfigDomain;

                    String tableName = flinkHiveDimConfigDomain.getTableName();

                    Long timeout = flinkDimConfigDomain.getCacheTimeout();
                    if (null == timeout) {
                        timeout = 0L;
                    }

                    // Clamp to at least 60 minutes; only referenced by the commented-out
                    // streaming-source hint below, kept so the hint can be re-enabled as-is.
                    long minute = TimeUnit.MILLISECONDS.toMinutes(timeout);
                    if (minute < 60) {
                        minute = 60;
                    }

                    sqlBuilder
                        .append("CREATE CATALOG " + hiveCatalogName + " WITH (")
                        .append("'type'='hive',")
                        .append("'default-database' = '" + dbName + "',")
                        .append("'hive-conf-dir'='")
                        .append(hiveSiteAddress)
                        .append("'")
                        .append(");");

                    // CREATE TEMPORARY VIEW `A` AS select ... from catalog.db.table
                    // SQL hints reference: https://developer.aliyun.com/article/780766
                    sqlBuilder
                        .append("CREATE TEMPORARY VIEW ")
                        .append("`")
                        .append(mappingTableName)
                        .append("`")
                        .append(" as select ");
                    if (CollectionUtils.isNotEmpty(flinkDimConfigDomain.getFieldList())) {
                        StringJoiner stringJoiner = new StringJoiner(",");
                        for (FlinkFieldDomain flinkFieldDomain : flinkDimConfigDomain.getFieldList()) {
                            stringJoiner.add("`" + flinkFieldDomain.getField() + "`");
                        }
                        sqlBuilder.append(stringJoiner);
                    } else {
                        // No explicit field list: project all columns.
                        sqlBuilder.append(" * ");
                    }
                    sqlBuilder
                        .append(" from ")
                        .append("`")
                        .append(hiveCatalogName)
                        .append("`")
                        .append(".")
                        .append("`")
                        .append(dbName)
                        .append("`")
                        .append(".")
                        .append("`")
                        .append(tableName)
                        .append("`")
                        // .append("/*+ OPTIONS('streaming-source.enable'='true',")
                        // .append("'streaming-source.partition.include'='all',")
                        // .append("'streaming-source.monitor-interval' = '" + minute + " min',")
                        // maps to org.apache.flink.connectors.hive.ConsumeOrder
                        // .append("'streaming-source.partition-order' = 'create-time')")
                        // .append("*/")
                        .append(";");

                    // sqlBuilder.append("set 'table.dynamic-table-options'='false';");
                }
                break;
                case MySQL:
                case Oracle: {
                    sqlBuilder.append("CREATE TABLE `");
                    sqlBuilder.append(mappingTableName).append("` ( ");
                    for (FlinkFieldDomain flinkFieldDomain : flinkDimConfigDomain.getFieldList()) {
                        sqlBuilder
                            .append("`")
                            .append(flinkFieldDomain.getField())
                            .append("` ")
                            .append(flinkFieldDomain.getType())
                            .append(",");
                    }
                    // Drop the trailing comma left by the field loop.
                    sqlBuilder = sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    sqlBuilder.append(" ) WITH ( ");

                    FlinkJDBCDimConfigDomain jdbcDimConfigDomain =
                        (FlinkJDBCDimConfigDomain) flinkDimConfigDomain;

                    sqlBuilder
                        .append("'connector'='jdbc','url'='")
                        .append(dataSourceDetailById.getDsLink())
                        .append("','table-name'='")
                        .append(jdbcDimConfigDomain.getTableName())
                        .append("','driver'='")
                        .append(JDBCUtil.getJDBCDriver(dsType))
                        .append("','username'='")
                        .append(dataSourceDetailById.getUsername())
                        .append("','password'='")
                        .append(dataSourceDetailById.getPassword())
                        .append("','lookup.cache.max-rows'='")
                        .append(jdbcDimConfigDomain.getCacheSize())
                        .append("','lookup.cache.ttl'='")
                        .append(jdbcDimConfigDomain.getCacheTimeout())
                        .append("ms'")
                        .append(");");
                }
                break;
                default:
                    // Unsupported ds types contribute an empty statement (behavior kept).
            }

            sqls.add(sqlBuilder.toString());
        }

        return sqls;
    }

    /**
     * Generates one CREATE TABLE statement per source config.
     *
     * <p>Supported source types are Kafka and File (the latter is substituted in debug
     * mode); any other ds type is silently skipped.
     *
     * @param flinkSourceConfigDomainList source configs of the task, may be null/empty
     * @param jobId engine job id, used to derive a unique kafka consumer group id
     * @return one DDL string per supported source config
     * @throws UnsupportedOperationException for an unknown kafka offset kind
     */
    private List<String> convertSourceDomainToSql(
        List<FlinkSourceConfigDomain> flinkSourceConfigDomainList, Long jobId) {

        if (CollectionUtils.isEmpty(flinkSourceConfigDomainList)) {
            return Collections.emptyList();
        }

        List<String> sqlList = Lists.newLinkedList();

        for (FlinkSourceConfigDomain flinkSourceConfigDomain : flinkSourceConfigDomainList) {
            final String dsType = flinkSourceConfigDomain.getDsType();
            final DSType dsTypeEnum = DSType.valueOf(dsType);

            DataSourceDetailDomain dataSourceDetailById =
                dataSourceService.getDataSourceDetailById(flinkSourceConfigDomain.getDsId(), false);
            StringBuilder sqlBuilder = new StringBuilder("CREATE TABLE `");
            sqlBuilder.append(flinkSourceConfigDomain.getMappingTableName()).append("` ( ");

            // Column list. Every entry leaves a trailing comma that is either followed by a
            // time-attribute column (kafka) or stripped before the closing parenthesis.
            for (FlinkFieldDomain flinkFieldDomain : flinkSourceConfigDomain.getFieldList()) {
                sqlBuilder
                    .append("`")
                    .append(flinkFieldDomain.getField())
                    .append("` ")
                    .append(flinkFieldDomain.getType())
                    .append(",");
            }

            switch (dsTypeEnum) {
                case Kafka:
                    FlinkKafkaSourceConfigDomain kafkaSourceConfigDomain =
                        (FlinkKafkaSourceConfigDomain) flinkSourceConfigDomain;

                    final Integer offset = kafkaSourceConfigDomain.getOffset();
                    final Integer timeType = kafkaSourceConfigDomain.getTimeType();
                    final String timeField = kafkaSourceConfigDomain.getTimeField();
                    final Integer delayMs = kafkaSourceConfigDomain.getDelayMs();

                    final Set<String> fields =
                        flinkSourceConfigDomain.getFieldList().stream()
                            .map(f -> f.getField().toLowerCase())
                            .collect(Collectors.toSet());

                    // timeType 0: use Flink processing time.
                    if (Objects.equals(timeType, 0)) {
                        String ptFieldName = "pt";
                        // If the user already defined a field named "pt", keep generating
                        // random names until one is free.
                        while (fields.contains(ptFieldName)) {
                            ptFieldName =
                                String.format("pt_%s", RandomStringUtils.randomAlphabetic(2).toLowerCase());
                        }
                        sqlBuilder.append("`").append(ptFieldName).append("` AS PROCTIME()");
                    } else if (Objects.equals(timeType, 1)) { // timeType 1: business event time
                        // BUGFIX: the delay used to be built as (delayMs/1000) + "" + (delayMs%1000),
                        // which drops the decimal point — 1005ms became "15" (15 seconds) and
                        // 500ms became "0500" (500 seconds). Emit fractional seconds instead.
                        // Also guard against a null delayMs (was an unboxing NPE).
                        final int delayMillis = delayMs == null ? 0 : delayMs;
                        String delay = String.format("%d.%03d", delayMillis / 1000, delayMillis % 1000);

                        sqlBuilder
                            .append("watermark for ")
                            .append(timeField)
                            .append(" as ")
                            .append(timeField)
                            .append(" - interval '")
                            .append(delay)
                            .append("' second");
                    } else {
                        // No time attribute: drop the trailing comma from the field loop.
                        sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    }

                    sqlBuilder.append(" ) WITH ( ");

                    sqlBuilder
                        .append("'connector'='kafka','topic'='")
                        .append(kafkaSourceConfigDomain.getTopic())
                        .append("','properties.bootstrap.servers'='")
                        .append(dataSourceDetailById.getDsLink())
                        .append("','properties.group.id'='")
                        .append(jobId)
                        .append("_")
                        .append(flinkSourceConfigDomain.getMappingTableName())
                        .append(
                            "','value.format'='json','value.json.fail-on-missing-field'='false','value.json.ignore-parse-errors'='true'");

                    // Kafka sources support 5 start-offset strategies, mapped to the flink sql
                    // property scan.startup.mode:
                    // https://ci.apache.org/projects/flink/flink-docs-release-1.12/dev/table/connectors/kafka.html#connector-options
                    // NOTE(review): the code maps 0 -> 'latest-offset' and 1 -> 'earliest-offset',
                    // but the original comments said the opposite. Behavior kept as-is; confirm
                    // which mapping the UI actually intends.
                    switch (offset) {
                        case 0:
                            sqlBuilder.append(",'scan.startup.mode'='latest-offset'");
                            break;
                        case 1:
                            sqlBuilder.append(",'scan.startup.mode'='earliest-offset'");
                            break;
                        // timestamp: start from a user-supplied timestamp for each partition.
                        case 2:
                            final String startPointTime = kafkaSourceConfigDomain.getStartPointTime();
                            final long time =
                                DateUtil.parse(startPointTime, DateUtil.DATE_PATTERN.yyyyMMddHHmmss).getTime();
                            sqlBuilder
                                .append(",'scan.startup.mode'='timestamp'")
                                .append(",'scan.startup.timestamp-millis'='")
                                .append(time)
                                .append("'");
                            break;
                        // group-offsets: start from offsets committed in ZK / Kafka brokers for
                        // the consumer group (properties.group.id is set above).
                        case 3:
                            sqlBuilder.append(",'scan.startup.mode'='group-offsets'");
                            break;
                        // specific-offsets: user-supplied offsets per partition, formatted as
                        // 'partition:0,offset:42;partition:1,offset:300'
                        case 4:
                            StringBuilder specificOffset = new StringBuilder();
                            for (FlinkKafkaSourceConfigDomain.PartitionOffset partitionOffset :
                                kafkaSourceConfigDomain.getPartitionOffsetList()) {
                                specificOffset
                                    .append("partition:")
                                    .append(partitionOffset.getPartition())
                                    .append(",")
                                    .append("offset:")
                                    .append(partitionOffset.getOffset())
                                    .append(";");
                            }

                            sqlBuilder
                                .append(",'scan.startup.mode'='specific-offsets'")
                                .append(",'scan.startup.specific-offsets'='")
                                // Strip the trailing ';' added by the loop above.
                                .append(specificOffset.substring(0, specificOffset.length() - 1))
                                .append("'");
                            break;
                        default:
                            throw new UnsupportedOperationException(String.format("未知的offset种类 %d", offset));
                    }

                    sqlBuilder.append(");");

                    sqlList.add(sqlBuilder.toString());
                    break;
                case File:
                    FlinkFileSourceConfigDomain fileSourceConfigDomain =
                        (FlinkFileSourceConfigDomain) flinkSourceConfigDomain;

                    // Drop the trailing comma from the field loop.
                    sqlBuilder = sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    sqlBuilder.append(" ) WITH ( ");

                    sqlBuilder
                        .append("'connector'='filesystem','path'='")
                        .append(fileSourceConfigDomain.getFilePath())
                        .append("','format'='")
                        .append(fileSourceConfigDomain.getFileFormat())
                        .append("','csv.ignore-parse-errors'='true'");

                    sqlBuilder.append(");");

                    sqlList.add(sqlBuilder.toString());
                    break;
            }
        }

        return sqlList;
    }

    // NOTE(review): mutable per-instance state written by convertSinkDomainToSql and read by
    // clientJobStart when expanding the ${hive_sink_catlog_name}/${hive_sink_db_name}
    // placeholders. This is a singleton @Service, so concurrent job starts can clobber each
    // other's values — consider returning these from convertSinkDomainToSql instead.
    private String hiveSinkCatlogName;
    private String hiveSinkDbName;

    public List<String> convertSinkDomainToSql(
        List<FlinkSinkConfigDomain> flinkSinkConfigDomainList) {
        if (CollectionUtils.isEmpty(flinkSinkConfigDomainList)) {
            return Collections.emptyList();
        }

        List<String> sqls = Lists.newLinkedList();

        for (FlinkSinkConfigDomain flinkSinkConfigDomain : flinkSinkConfigDomainList) {
            final String dsType = flinkSinkConfigDomain.getDsType();
            final DSType dsTypeEnum = DSType.valueOf(dsType);

            DataSourceDetailDomain dataSourceDetailById =
                dataSourceService.getDataSourceDetailById(flinkSinkConfigDomain.getDsId(), false);
            // hive sink
            if (dsTypeEnum == DSType.Hive) {
                FlinkHiveSinkConfigDomain flinkHiveSinkConfigDomain =
                    (FlinkHiveSinkConfigDomain) flinkSinkConfigDomain;

                StringBuilder sqlBuilder = new StringBuilder();

                String catalogName = "hiveCatalog";
                hiveSinkCatlogName = catalogName;
                // String dbName = "default";
                String dbName = dataSourceDetailById.getDbName();
                hiveSinkDbName = dbName;
                String tableName = flinkHiveSinkConfigDomain.getTableName();

                // hive-site.xml目录
                String hiveSiteAddress = dataSourceDetailById.getHiveConfig().getHiveSiteAddress();
                if (org.apache.commons.lang3.StringUtils.isNotBlank(hiveSiteAddress)) {
                    if (Objects.nonNull(hiveSiteAddress)) {
                        int endIndex = hiveSiteAddress.lastIndexOf("/");
                        hiveSiteAddress = hiveSiteAddress.substring(0, endIndex);
                    } else {
                        throw new BusinessException("未配置hive-site.xml");
                    }
                }
                // 生成hive catalog
                sqlBuilder.append("CREATE CATALOG `").append(catalogName).append("`  WITH (");
                sqlBuilder.append("'type'='hive',");
                sqlBuilder.append("'default-database' = '").append(dbName).append("',");
                sqlBuilder.append("'hive-conf-dir'='").append(hiveSiteAddress).append("'");
                sqlBuilder.append(");");

                // dialect
                sqlBuilder.append(ESCAPE_STR);
                sqlBuilder.append("SET table.sql-dialect=hive;");
                sqlBuilder.append(ESCAPE_STR);

                // hive sink
                List<FlinkFieldDomain> fieldList = flinkSinkConfigDomain.getFieldList();
                long count = fieldList.stream().filter(i -> Objects.equals(1, i.getIsPartition())).count();
                // hive分区表
                if (count > 0) {
                    {
                        sqlBuilder.append(ESCAPE_STR);
                        sqlBuilder.append(" CREATE TABLE IF NOT EXISTS ");
                        sqlBuilder.append("`").append(catalogName).append("`");
                        sqlBuilder.append(".");
                        sqlBuilder.append("`").append(dbName).append("`");
                        sqlBuilder.append(".");
                        sqlBuilder.append("`").append(tableName).append("`");
                        sqlBuilder.append(" ( ");
                        for (FlinkFieldDomain flinkFieldDomain : flinkSinkConfigDomain.getFieldList()) {
                            // 分区字段 跳过
                            if (Integer.valueOf(1).equals(flinkFieldDomain.getIsPartition())) {
                                continue;
                            }
                            sqlBuilder.append("`").append(flinkFieldDomain.getField()).append("`");
                            sqlBuilder.append(" ");
                            String fieldType = flinkFieldDomain.getType();
                            // 要是hive表中的field是varchar(1)之类的,拿到的fiedldType只有单单的varchar没有长度信息
                            // 到flink中会报错因为是不能单单指定varchar还要指明对应长度
                            if ("VARCHAR".equalsIgnoreCase(fieldType)) {
                                fieldType = "STRING";
                            }
                            sqlBuilder.append(fieldType).append(",");
                        }
                        sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                        sqlBuilder.append(" ) ");

                        // partition field
                        sqlBuilder.append(" PARTITIONED BY ( ");
                        for (FlinkFieldDomain flinkFieldDomain : flinkSinkConfigDomain.getFieldList()) {
                            // 不是分区字段 跳过
                            if (Integer.valueOf(0).equals(flinkFieldDomain.getIsPartition())) {
                                continue;
                            }

                            sqlBuilder
                                .append("`")
                                .append(flinkFieldDomain.getField())
                                .append("`")
                                .append(" ")
                                .append(flinkFieldDomain.getType())
                                .append(",");
                        }
                        sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                        sqlBuilder.append(")");

                        // TBLPROPERTIES
                        sqlBuilder.append(
                            " TBLPROPERTIES('sink.partition-commit.policy.kind'='metastore,success-file');");
                        sqlBuilder.append(ESCAPE_STR);
                    }

                    // alter table
                    // ALTER TABLE `hiveCatalog`.`default`.`hive_table_par_mongo` SET TBLPROPERTIES
                    // ('sink.partition-commit.policy.kind'='metastore,success-file');
                    sqlBuilder.append(ESCAPE_STR);
                    sqlBuilder.append("ALTER TABLE ");
                    sqlBuilder.append("`").append(catalogName).append("`");
                    sqlBuilder.append(".");
                    sqlBuilder.append("`").append(dbName).append("`");
                    sqlBuilder.append(".");
                    sqlBuilder.append("`").append(tableName).append("`");
                    sqlBuilder.append(
                        " SET TBLPROPERTIES ('sink.partition-commit.policy.kind'='metastore,success-file');");
                    sqlBuilder.append(ESCAPE_STR);
                } else {
                    // hive非分区表
                    sqlBuilder.append(ESCAPE_STR);
                    sqlBuilder.append(" CREATE TABLE IF NOT EXISTS ");
                    sqlBuilder.append("`").append(catalogName).append("`");
                    sqlBuilder.append(".");
                    sqlBuilder.append("`").append(dbName).append("`");
                    sqlBuilder.append(".");
                    sqlBuilder.append("`").append(tableName).append("`");
                    sqlBuilder.append(" ( ");
                    for (FlinkFieldDomain flinkFieldDomain : flinkSinkConfigDomain.getFieldList()) {
                        // 分区字段 跳过
                        if (Integer.valueOf(1).equals(flinkFieldDomain.getIsPartition())) {
                            continue;
                        }
                        sqlBuilder.append("`").append(flinkFieldDomain.getField()).append("`");
                        sqlBuilder.append(" ");
                        String fieldType = flinkFieldDomain.getType();
                        // 要是hive表中的field是varchar(1)之类的,拿到的fiedldType只有单单的varchar没有长度信息
                        // 到flink中会报错因为是不能单单指定varchar还要指明对应长度
                        if ("VARCHAR".equalsIgnoreCase(fieldType)) {
                            fieldType = "STRING";
                        }
                        sqlBuilder.append(fieldType).append(",");
                    }
                    sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    sqlBuilder.append(" ) ");
                    sqlBuilder.append(ESCAPE_STR);
                }
                // dialect
                sqlBuilder.append(ESCAPE_STR);
                sqlBuilder.append("SET table.sql-dialect=default;");
                sqlBuilder.append(ESCAPE_STR);

                sqls.add(sqlBuilder.toString());
                continue;
            }

            StringBuilder sqlBuilder = new StringBuilder("CREATE TABLE `");
            sqlBuilder.append(flinkSinkConfigDomain.getMappingTableName()).append("` ( ");

            if (dsTypeEnum == DSType.HBase) {
                FlinkHbaseSinkConfigDomain flinkHbaseSinkConfigDomain =
                    (FlinkHbaseSinkConfigDomain) flinkSinkConfigDomain;

                // row key
                {
                    String rowKeyName = flinkHbaseSinkConfigDomain.getRowKeyName();
                    String rowKeyType = flinkHbaseSinkConfigDomain.getRowKeyType();
                    if (StringUtils.isBlank(rowKeyName) || StringUtils.isBlank(rowKeyType)) {
                        throw new IllegalArgumentException("未配置row key的name/type");
                    }

                    sqlBuilder.append("`").append(rowKeyName).append("` ").append(rowKeyType).append(",");
                }

                // 在前端需要有保证使得提交的column的column family名字在List<FlinkFieldDomain>中是连续的
                // 首先需要对离散的在相同column family的进行合并,要使用linkedHashMap保证顺序
                Map<String, List<FlinkFieldDomain>> columnFamilyName_flinkFieldDomainList =
                    new LinkedHashMap<>();

                for (FlinkFieldDomain flinkFieldDomain : flinkSinkConfigDomain.getFieldList()) {
                    List<FlinkFieldDomain> flinkFieldDomainList =
                        columnFamilyName_flinkFieldDomainList.computeIfAbsent(
                            flinkFieldDomain.getColumnFamily(), columnFamily -> new ArrayList<>());

                    flinkFieldDomainList.add(flinkFieldDomain);
                }

                for (Map.Entry<String, List<FlinkFieldDomain>> entry :
                    columnFamilyName_flinkFieldDomainList.entrySet()) {
                    String columnFamilyName = entry.getKey();
                    List<FlinkFieldDomain> flinkFieldDomainList = entry.getValue();

                    // column family  hbase的单个column family样例 StuInfo ROW <Name String,Age INT>,
                    sqlBuilder.append("`").append(columnFamilyName).append("` ROW <");

                    for (FlinkFieldDomain flinkFieldDomain : flinkFieldDomainList) {
                        sqlBuilder
                            .append("`")
                            .append(flinkFieldDomain.getField())
                            .append("` ")
                            .append(flinkFieldDomain.getType())
                            .append(",");
                    }
                    sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    sqlBuilder.append(">,");
                }
            } else {
                for (FlinkFieldDomain flinkFieldDomain : flinkSinkConfigDomain.getFieldList()) {
                    sqlBuilder
                        .append("`")
                        .append(flinkFieldDomain.getField())
                        .append("` ")
                        .append(flinkFieldDomain.getType())
                        .append(",");
                }
            }

            switch (dsTypeEnum) {
                case MySQL:
                case Oracle: {
                    FlinkJDBCSinkConfigDomain jdbcSinkConfigDomain =
                        (FlinkJDBCSinkConfigDomain) flinkSinkConfigDomain;

                    final Integer dataMode = jdbcSinkConfigDomain.getDataMode();
                    final List<String> primaryFields = jdbcSinkConfigDomain.getPrimaryFields();

                    if (Objects.equals(dataMode, 1) && CollectionUtils.isNotEmpty(primaryFields)) {
                        sqlBuilder.append(" primary key (");
                        for (String primaryField : primaryFields) {
                            sqlBuilder.append("`").append(primaryField).append("`,");
                        }
                        sqlBuilder = sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                        sqlBuilder.append(") NOT ENFORCED");
                    } else {
                        sqlBuilder = sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    }

                    sqlBuilder.append(" ) WITH ( ");

                    sqlBuilder
                        .append("'connector'='jdbc','url'='")
                        .append(dataSourceDetailById.getDsLink())
                        .append("','table-name'='")
                        .append(jdbcSinkConfigDomain.getTableName())
                        .append("','driver'='")
                        .append(JDBCUtil.getJDBCDriver(dsType))
                        .append("','username'='")
                        .append(dataSourceDetailById.getUsername())
                        .append("','password'='")
                        .append(dataSourceDetailById.getPassword())
                        .append("');");
                    sqls.add(sqlBuilder.toString());
                }
                break;
                case Kafka: {
                    sqlBuilder = sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    sqlBuilder.append(" ) WITH ( ");

                    FlinkKafkaSinkConfigDomain kafkaSinkConfigDomain =
                        (FlinkKafkaSinkConfigDomain) flinkSinkConfigDomain;

                    sqlBuilder
                        .append("'connector'='kafka','topic'='")
                        .append(kafkaSinkConfigDomain.getTopic())
                        .append("','properties.bootstrap.servers'='")
                        .append(dataSourceDetailById.getDsLink())
                        .append("','value.format'='json'");

                    if (null != kafkaSinkConfigDomain.getParallel()) {
                        sqlBuilder
                            .append(",'sink.parallelism'='")
                            .append(kafkaSinkConfigDomain.getParallel())
                            .append("'");
                    }

                    sqlBuilder.append(");");

                    sqls.add(sqlBuilder.toString());
                }
                break;
                case File: {
                    sqlBuilder = sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    sqlBuilder.append(" ) WITH ( ");

                    FlinkFileSinkConfigDomain fileSinkConfigDomain =
                        (FlinkFileSinkConfigDomain) flinkSinkConfigDomain;

                    sqlBuilder
                        .append("'connector'='filesystem','path'='")
                        .append(fileSinkConfigDomain.getFilePath())
                        .append("','format'='")
                        .append(fileSinkConfigDomain.getFileFormat())
                        .append("'");

                    sqlBuilder.append(");");

                    sqls.add(sqlBuilder.toString());
                }
                break;
                case HBase: {
                    FlinkHbaseSinkConfigDomain flinkHbaseSinkConfigDomain =
                        (FlinkHbaseSinkConfigDomain) flinkSinkConfigDomain;

                    final Integer dataMode = flinkHbaseSinkConfigDomain.getDataMode();
                    final List<String> primaryFields = flinkHbaseSinkConfigDomain.getPrimaryFields();

                    if (Objects.equals(dataMode, 1) && CollectionUtils.isNotEmpty(primaryFields)) {
                        sqlBuilder.append(" primary key (");
                        for (String primaryField : primaryFields) {
                            sqlBuilder.append("`").append(primaryField).append("`,");
                        }
                        sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                        sqlBuilder.append(") NOT ENFORCED");
                    } else {
                        sqlBuilder.deleteCharAt(sqlBuilder.length() - 1);
                    }

                    sqlBuilder.append(" ) WITH ( ");

                    sqlBuilder.append("'connector'='hbase-2.2',");
                    sqlBuilder
                        .append("'table-name' = '")
                        .append(flinkHbaseSinkConfigDomain.getTableName())
                        .append("',");

                    JSONObject jsonObject =
                        // JSON.parseObject( "{ \"hbase.rootdir\":\"hdfs://cdh2:9000/hbase\","+
                        //      "\"hbase.zookeeper.quorum\":\"cdh2:2181\" }");
                        JSON.parseObject(dataSourceDetailById.getConnectionProperty());

                    String zkQuorum = jsonObject.get("hbase.zookeeper.quorum").toString();
                    if (StringUtils.isBlank(zkQuorum)) {
                        throw new IllegalArgumentException(
                            String.format(
                                "id是:%d的hbase数据源 %s 没有配置hbase.zookeeper.quorum",
                                dataSourceDetailById.getId(), dataSourceDetailById.getDsName()));
                    }

                    sqlBuilder.append("'zookeeper.quorum' = '").append(zkQuorum).append("'");
                    sqlBuilder.append(");");

                    sqls.add(sqlBuilder.toString());
                }

                break;
            }
        }

        return sqls;
    }

    /**
     * Builds one {@code CREATE TEMPORARY FUNCTION} statement for every user-defined
     * function referenced in the given Flink SQL.
     *
     * @param projectId id of the project whose function nodes are searched
     * @param sql       Flink SQL text to scan for custom function names
     * @return CREATE TEMPORARY FUNCTION statements; empty when the SQL references
     *         no custom functions or when the lookup fails (failure is logged)
     */
    private List<String> convertFunctionToSql(Long projectId, String sql) {
        List<String> sqlList = Lists.newLinkedList();

        try {
            // Names of the user-defined functions appearing in the SQL.
            final List<String> functionNameList = FlinkUtil.getFunctionList(sql);
            // Guard: an empty collection passed to in(...) would build invalid SQL ("IN ()").
            if (CollectionUtils.isEmpty(functionNameList)) {
                return sqlList;
            }

            LambdaQueryWrapper<FlinkMenuNodeEntity> queryWrapper =
                new LambdaQueryWrapper<FlinkMenuNodeEntity>()
                    .eq(FlinkMenuNodeEntity::getProjectId, projectId)
                    .in(FlinkMenuNodeEntity::getNodeType, FlinkNodeTypeEnum.getAllFunctionNodeTypeList())
                    .in(FlinkMenuNodeEntity::getName, functionNameList);
            List<FlinkMenuNodeEntity> flinkMenuNodeDomainList = flinkNodeService.list(queryWrapper);
            // Nothing matched: no statements to generate, and avoids querying downstream
            // services with an empty id list.
            if (CollectionUtils.isEmpty(flinkMenuNodeDomainList)) {
                return sqlList;
            }

            final List<Long> menuNodeIdList =
                flinkMenuNodeDomainList.stream()
                    .map(FlinkMenuNodeEntity::getId)
                    .collect(Collectors.toList());

            // node id -> function name; used to name each temporary function below.
            final Map<Long, String> menuNodeId_menuNodeName =
                flinkMenuNodeDomainList.stream()
                    .collect(Collectors.toMap(FlinkMenuNodeEntity::getId, FlinkMenuNodeEntity::getName));

            final List<FlinkFunctionDomain> flinkFunctionDomainList =
                flinkFunctionService.getByNodeIds(menuNodeIdList);

            for (FlinkFunctionDomain flinkFunctionDomain : flinkFunctionDomainList) {
                StringBuilder sqlBuilder = new StringBuilder();
                sqlBuilder
                    .append("CREATE TEMPORARY FUNCTION IF NOT EXISTS ")
                    .append(menuNodeId_menuNodeName.get(flinkFunctionDomain.getNodeId()))
                    .append(" AS '")
                    .append(flinkFunctionDomain.getClassName())
                    .append("' LANGUAGE JAVA;");
                sqlList.add(sqlBuilder.toString());
            }

        } catch (Exception e) {
            // Best-effort: a failed function lookup must not abort the overall SQL build.
            log.error(e.getMessage(), e);
        }

        return sqlList;
    }

    /**
     * Resolves the jar paths (usually HTTP addresses) of the resources backing the
     * custom Java functions referenced in the given Flink SQL.
     *
     * @param projectId id of the project whose function nodes are searched
     * @param sql       Flink SQL text to scan for custom function names
     * @return jar paths for the referenced functions; empty when none are
     *         referenced or when the lookup fails (failure is logged)
     */
    private List<String> getExtFunctionJarPathList(Long projectId, String sql) {
        List<String> jarPathList = Lists.newLinkedList();

        try {
            final List<String> functionNameList = FlinkUtil.getFunctionList(sql);
            // Guard: an empty collection passed to in(...) would build invalid SQL ("IN ()").
            if (CollectionUtils.isEmpty(functionNameList)) {
                return jarPathList;
            }

            LambdaQueryWrapper<FlinkMenuNodeEntity> queryWrapper =
                new LambdaQueryWrapper<FlinkMenuNodeEntity>()
                    .eq(FlinkMenuNodeEntity::getProjectId, projectId)
                    .in(FlinkMenuNodeEntity::getNodeType, FlinkNodeTypeEnum.getAllFunctionNodeTypeList())
                    .in(FlinkMenuNodeEntity::getName, functionNameList);
            final List<FlinkMenuNodeEntity> flinkMenuNodeDomainList =
                flinkNodeService.list(queryWrapper);
            if (CollectionUtils.isEmpty(flinkMenuNodeDomainList)) {
                return jarPathList;
            }

            final List<Long> menuNodeIdList =
                flinkMenuNodeDomainList.stream()
                    .map(FlinkMenuNodeEntity::getId)
                    .collect(Collectors.toList());

            final List<FlinkFunctionDomain> flinkFunctionDomainList =
                flinkFunctionService.getByNodeIds(menuNodeIdList);

            // The resource node ids identify the external jar files to load.
            final List<Long> resourceNodeIds =
                flinkFunctionDomainList.stream()
                    .map(FlinkFunctionDomain::getResourceNodeId)
                    .collect(Collectors.toList());
            if (CollectionUtils.isEmpty(resourceNodeIds)) {
                return jarPathList;
            }

            final List<FlinkResourceDomain> flinkResourceDomainList =
                flinkResourceService.getActiveListByNodeIds(resourceNodeIds);

            // Map the path directly; the previous String.format("%s", path) was a no-op.
            jarPathList =
                flinkResourceDomainList.stream()
                    .map(FlinkResourceDomain::getPath)
                    .collect(Collectors.toList());

        } catch (Exception e) {
            // Best-effort: a failed lookup must not abort the overall SQL build.
            log.error(e.getMessage(), e);
        }

        return jarPathList;
    }

    /**
     * Appends the given content to the file in UTF-8, followed by a blank-line
     * separator. Blank content is silently ignored.
     *
     * @param file    target file (created if absent, appended otherwise)
     * @param content text to append; skipped when null/blank
     * @throws IOException if writing fails
     */
    private void writeToFile(File file, String content) throws IOException {
        if (StringUtils.isBlank(content)) {
            return;
        }
        FileUtils.write(file, content, "UTF-8", true);
        FileUtils.write(file, "\n\n", "UTF-8", true);
    }

    /**
     * Appends every string from the supplied lists to the file in UTF-8, each
     * entry followed by a blank-line separator. Null varargs and null/empty
     * lists are skipped silently.
     *
     * @param file     target file (created if absent, appended otherwise)
     * @param contents zero or more lists of text snippets to append
     * @throws IOException if writing fails
     */
    private void writeToFile(File file, List<String>... contents) throws IOException {
        if (null == contents) {
            return;
        }
        for (List<String> content : contents) {
            if (CollectionUtils.isEmpty(content)) {
                continue;
            }
            for (String s : content) {
                FileUtils.write(file, s, "UTF-8", true);
                FileUtils.write(file, "\n\n", "UTF-8", true);
            }
        }
    }

    /**
     * Ad-hoc scratch entry point: prints the parent directory of a sample path.
     * NOTE(review): debug leftover in a @Service class — consider removing.
     */
    public static void main(String[] args) {
        String samplePath = "/data/hufu-web/hive-site/hive-site.xml";
        int lastSlash = samplePath.lastIndexOf('/');
        System.out.println(samplePath.substring(0, lastSlash));
    }
}
