package cn.com.tt.flink.sql.engine.processor;

import cn.com.bluemoon.bd.utils.RegexUtils;
import cn.com.tt.flink.runner.AbstractProcessor;
import cn.com.tt.flink.sql.engine.pojo.*;
import cn.com.tt.flink.sql.engine.pojo.source.JobSource;
import cn.com.tt.flink.sql.engine.repository.*;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;
import org.apache.ibatis.session.SqlSession;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Processor that loads a job's configuration (UDFs, sources, SQL, startup options) from the
 * metadata database into the {@code JobConfig} singleton and applies table-environment settings.
 */
public class LoadJobConfigProcessor extends AbstractProcessor {

    /** Raw program arguments; parsed into a {@link ParameterTool} in {@link #process()}. */
    private final String[] args;

    /** Prefix marking user-supplied arguments that are forwarded into the table environment. */
    private static final String TABLE_ENV_CONFIG_PREFIX = "table_env_config:";

    public LoadJobConfigProcessor(StreamTableEnvironment tableEnv, String[] args) {
        super(tableEnv);
        this.args = args;
    }

    /**
     * Loads the job configuration and configures the table environment. The MyBatis session is
     * scoped to this call via try-with-resources.
     *
     * @throws Exception if loading the job configuration fails
     */
    @Override
    public void process() throws Exception {
        try (SqlSession sqlSession = MybatisSessionFactory.getSqlSessionFactory().openSession()) {
            ParameterTool parameterTool = ParameterTool.fromArgs(args);
            // Load the job configuration (UDFs, sources, SQL) into the JobConfig singleton.
            constructJobConfig(sqlSession, parameterTool);
            // Register the Hive catalog (currently disabled).
            //registerHiveCatalog(parameterTool);
            // Apply table-environment settings (system defaults + user overrides).
            setTableEnvConfig(parameterTool);
        }
    }

    /**
     * Assembles the {@code JobConfig} singleton from database metadata and runtime parameters.
     *
     * @param sqlSession    open MyBatis session used to obtain the repository mappers
     * @param parameterTool parsed command-line parameters
     */
    private void constructJobConfig(SqlSession sqlSession, ParameterTool parameterTool) {
        JobUdfRepository jobUdfMapper = sqlSession.getMapper(JobUdfRepository.class);
        JobSourceRepository jobSourceMapper = sqlSession.getMapper(JobSourceRepository.class);
        JobSqlRepository jobSqlMapper = sqlSession.getMapper(JobSqlRepository.class);
        ClusterRepository clusterMapper = sqlSession.getMapper(ClusterRepository.class);
        JobSqlLocalParameterRepository localParamMapper = sqlSession.getMapper(JobSqlLocalParameterRepository.class);
        // Fetch job metadata for this job version / run configuration.
        String jobVersionId = parameterTool.get("job_version_id");
        String runConfId = parameterTool.get("run_conf_id");
        List<JobUdf> udfList = jobUdfMapper.getJobUdfList(jobVersionId);
        List<JobSource> sourceList = jobSourceMapper.getJobSourceList(jobVersionId);
        JobSql jobSql = getJobSql(jobSqlMapper, clusterMapper, localParamMapper, jobVersionId, runConfId);

        // Source-table consumption mode; kept compatible with the DataStream-API-era settings.
        // Locale.ROOT avoids locale-dependent lowercasing (e.g. the Turkish dotless-i problem).
        String startupMode = parameterTool.get("kafka.startup.mode", "earliest-offset").toLowerCase(Locale.ROOT);

        // Legacy values "earliest"/"latest" map to the SQL connector's "-offset" variants.
        if (startupMode.equals("earliest") || startupMode.equals("latest")) {
            startupMode = startupMode + "-offset";
        }

        // Default startup timestamp is today at midnight (used by timestamp-based startup modes).
        String todayStart = LocalDateTime.of(LocalDate.now(), LocalTime.MIN).format(DateTimeFormatter.ofPattern("yyyyMMddHHmmss"));
        String startupTimestamp = parameterTool.get("kafka.start.from.timestamp", todayStart);
        long startupTimestampMs = timestampToMs(startupTimestamp, "yyyyMMddHHmmss");
        // Populate the job configuration singleton.
        JobConfig jobConfig = JobConfig.getInstance();
        jobConfig.setUdfList(udfList);
        jobConfig.setSourceList(sourceList);
        jobConfig.setSqlStatement(jobSql);
        jobConfig.setScanStartupMode(startupMode);
        jobConfig.setScanStartupTimestampMs(startupTimestampMs);
    }

    /**
     * Builds the {@code JobSql}: the raw SQL statement plus the substitution map for all
     * {@code ${...}} placeholders (cluster addresses and run-scoped local parameters).
     *
     * @param jobSqlMapper     mapper for the job's SQL statement
     * @param clusterMapper    mapper for cluster name → address lookups
     * @param localParamMapper mapper for run-configuration-local parameters
     * @param jobVersionId     job version identifier
     * @param runConfId        run configuration identifier
     * @return the SQL statement with its placeholder-substitution map
     */
    private JobSql getJobSql(JobSqlRepository jobSqlMapper, ClusterRepository clusterMapper,
                             JobSqlLocalParameterRepository localParamMapper, String jobVersionId, String runConfId) {
        String sqlStatement = jobSqlMapper.getJobSqlStatement(jobVersionId);
        // Cluster names and local variables referenced by the SQL via ${...}; their values are
        // substituted when the DDL statements are generated.
        List<String> clusterNameList = RegexUtils.extractDatas(sqlStatement, "\\$\\{(.*?)\\}").stream().distinct().collect(Collectors.toList());
        Map<String, String> localParamMap = new HashMap<>();
        // Resolve cluster names to their addresses.
        if (!clusterNameList.isEmpty()) {
            List<JobCluster> clusterList = clusterMapper.getByNameList(clusterNameList);
            clusterList.forEach(c -> localParamMap.put(c.getClusterName(), c.getClusterAddress()));
        }

        // Local parameters override cluster entries on key collision (applied second).
        List<JobSqlLocalParameter> jobSqlLocalParamList = localParamMapper.getJobSqlLocalParamListByRunConfId(runConfId);
        jobSqlLocalParamList.forEach(p -> localParamMap.put(p.getParamKey(), p.getParamValue()));

        return new JobSql(sqlStatement, localParamMap);
    }

    /**
     * Registers a Hive catalog named "hive" on the table environment.
     * NOTE(review): currently unused — its only call site in {@link #process()} is commented out.
     *
     * @param parameterTool parsed command-line parameters (reads {@code hive_conf_dir})
     */
    private void registerHiveCatalog(ParameterTool parameterTool) {
        String hiveDir = parameterTool.get("hive_conf_dir", "/etc/hive-ol/conf.cloudera.hive");
        String defaultDatabase = "default";
        String hiveCatalogName = "hive";
        HiveCatalog hive = new HiveCatalog(hiveCatalogName, defaultDatabase, hiveDir, hiveDir, null);
        tableEnv.registerCatalog(hiveCatalogName, hive);
    }

    /**
     * Applies system-default table-environment options, then overlays any user-supplied options
     * whose argument keys start with {@link #TABLE_ENV_CONFIG_PREFIX}.
     *
     * @param parameterTool parsed command-line parameters
     */
    private void setTableEnvConfig(ParameterTool parameterTool) {
        // System defaults.
        Configuration configuration = tableEnv.getConfig().getConfiguration();
        configuration.setString("table.exec.source.idle-timeout", "10 s");
        configuration.setBoolean("table.dynamic-table-options.enabled", true);
        configuration.setBoolean("table.exec.source.cdc-events-duplicate", true);
        configuration.setString("table.exec.mini-batch.enabled", "true");
        configuration.setString("table.exec.mini-batch.size", "200000000");
        configuration.setString("table.exec.mini-batch.allow-latency", "500ms");
        configuration.setString("table.optimizer.agg-phase-strategy", "TWO_PHASE");

        // Collect user-defined options.
        Map<String, String> tableEnvConfig = parameterTool.toMap()
                .entrySet()
                .stream()
                .filter(entry -> entry.getKey().startsWith(TABLE_ENV_CONFIG_PREFIX))
                // substring (not replace) strips the prefix only from the start of the key,
                // never from other occurrences inside it.
                .collect(Collectors.toMap(entry -> entry.getKey().substring(TABLE_ENV_CONFIG_PREFIX.length()), Map.Entry::getValue));
        // User-defined options override the defaults above.
        tableEnvConfig.forEach(configuration::setString);
    }

    /**
     * Converts a timestamp string to epoch milliseconds, interpreting it in UTC+8
     * (China Standard Time).
     *
     * @param timestamp the timestamp text to parse
     * @param format    the {@link DateTimeFormatter} pattern describing {@code timestamp}
     * @return epoch milliseconds of the parsed instant at offset +08:00
     */
    private static long timestampToMs(String timestamp, String format) {
        DateTimeFormatter df = DateTimeFormatter.ofPattern(format);
        LocalDateTime dateTime = LocalDateTime.parse(timestamp, df);
        return dateTime.toInstant(ZoneOffset.of("+8")).toEpochMilli();
    }
}
