package com.bw.medical.realtime.util;

import com.bw.medical.realtime.common.MedicalCommon;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import com.ververica.cdc.debezium.JsonDebeziumDeserializationSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.kafka.connect.json.DecimalFormat;
import org.apache.kafka.connect.json.JsonConverterConfig;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Utility methods for bootstrapping Flink environments and sources used by the
 * medical real-time pipeline: ODS table DDL, checkpointed stream environments,
 * and a Flink-CDC MySQL source for the configuration database.
 */
public class CreateEnvUtil {

    /** Utility class: all members are static — not meant to be instantiated. */
    private CreateEnvUtil() {
    }

    /**
     * Registers the ODS-layer Kafka-backed table in the given table environment.
     *
     * @param tableEnv  table environment the DDL is executed against
     * @param topicName Kafka topic the connector consumes from
     * @param groupId   Kafka consumer group id passed to the connector DDL
     */
    public static void createOdsTable(StreamTableEnvironment tableEnv, String topicName, String groupId) {
        // NOTE(review): the table is named after MedicalCommon.KAFKA_ODS_TOPIC while the
        // connector reads from the topicName parameter — confirm they are meant to differ.
        tableEnv.executeSql("CREATE TABLE " + MedicalCommon.KAFKA_ODS_TOPIC + " (\n" +
                " `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `data` map<string,string>,\n" +
                "  `old` map<string,string> \n" +
                ")" + KafkaUtil.getKafkaDDL(topicName, groupId));
    }

    /**
     * Initializes a stream execution environment with a local WebUI port and
     * checkpointing configured: 10 s interval, 30 s minimum pause, checkpoints
     * retained on cancellation, hashmap state backend, HDFS checkpoint storage.
     *
     * @param port    local Flink WebUI/REST listen port
     * @param appName application name; appended to the HDFS checkpoint path
     * @return the configured stream execution environment
     */
    public static StreamExecutionEnvironment getStreamEnv(Integer port, String appName) {
        // 1. Create the environment with an explicit REST port for the local WebUI.
        Configuration conf = new Configuration();
        conf.setInteger(RestOptions.PORT, port);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);

        // 2. Checkpointing.
        // 2.1 Enable checkpointing every 10 seconds.
        env.enableCheckpointing(10 * 1000L);
        // 2.2 Minimum pause between two consecutive checkpoints.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(30 * 1000L);
        // 2.3 Keep externalized checkpoints when the job is cancelled.
        env.getCheckpointConfig().setExternalizedCheckpointCleanup(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        // 2.4 State backend type.
        env.setStateBackend(new HashMapStateBackend());
        // 2.5 Checkpoint storage path on HDFS, namespaced by application name.
        env.getCheckpointConfig().setCheckpointStorage(MedicalCommon.HDFS_URI_PREFIX + appName);
        // 2.6 HDFS user name used when writing checkpoint data.
        System.setProperty("HADOOP_USER_NAME", MedicalCommon.HADOOP_USER_NAME);

        return env;
    }

    /**
     * Builds a Flink-CDC {@link MySqlSource} that captures the configuration
     * database/table, starting with an initial snapshot and emitting change
     * records as JSON strings.
     *
     * @return the configured MySqlSource
     */
    public static MySqlSource<String> getMysqlSource() {
        // Parse DECIMAL columns as numeric values instead of the BASE64 default,
        // which would otherwise break downstream JSON parsing.
        // Typed as Map<String, Object> (was a raw HashMap, causing an unchecked call)
        // to match the JsonDebeziumDeserializationSchema constructor parameter.
        Map<String, Object> config = new HashMap<>();
        config.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name());
        // Pass the converter configuration to the JSON deserialization schema
        // used to initialize the MySqlSource below.
        JsonDebeziumDeserializationSchema jsonDebeziumDeserializationSchema =
                new JsonDebeziumDeserializationSchema(false, config);

        // Allow the JDBC driver to request the server's public key; needed for
        // MySQL 8 caching_sha2_password authentication without SSL.
        Properties prop = new Properties();
        prop.setProperty("allowPublicKeyRetrieval", "true");

        // Assemble the source from connection settings declared in MedicalCommon.
        return MySqlSource.<String>builder()
                .hostname(MedicalCommon.MYSQL_HOSTNAME)
                .port(MedicalCommon.MYSQL_PORT)
                .username(MedicalCommon.MYSQL_USERNAME)
                .password(MedicalCommon.MYSQL_PASSWD)
                .jdbcProperties(prop)
                .databaseList(MedicalCommon.MEDICAL_CONFIG_DATABASE)
                .tableList(MedicalCommon.MEDICAL_CONFIG_TABLE)
                .serverTimeZone("Asia/Shanghai")
                .startupOptions(StartupOptions.initial())
                .deserializer(jsonDebeziumDeserializationSchema)
                .build();
    }
}