package cn.tannn;


import cn.tannn.constant.FlinSqlConstant;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.SqlDialect;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * post
 */
/**
 * Flink SQL job: joins MySQL CDC user change streams with PostgreSQL CDC class
 * change streams and upserts the combined rows into a MySQL table via JDBC.
 *
 * <p>Pipeline: {@code mysql-cdc (user)} LEFT JOIN {@code postgres-cdc (classes)}
 * on {@code class_no} → {@code jdbc} sink {@code user_class_sink}.
 *
 * <p>NOTE(review): connection hosts and credentials are hardcoded below —
 * consider externalizing them to configuration before production use.
 */
public class PgSQLAndMySQL2MySQL {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = createCheckpointedEnvironment();

        EnvironmentSettings settings = EnvironmentSettings.newInstance()
                .useBlinkPlanner()
                .inStreamingMode()
                .build();

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, settings);
        tableEnv.getConfig().getConfiguration().setString(FlinSqlConstant.JOB_NAME_KEY, "PgSQLAndMySQL2MySQL");
        tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);

        // Register the two CDC source tables and the JDBC sink table.
        tableEnv.executeSql(mysqlUserSourceDdl());
        tableEnv.executeSql(postgresClassSourceDdl());
        tableEnv.executeSql(mysqlSinkDdl());

        // Simple join/aggregation: INSERT INTO submits the streaming job.
        // NOTE(review): executeSql for INSERT is asynchronous; call
        // insertResult.await() here if the client should stay attached and
        // surface job failures — TODO confirm the intended deployment mode.
        String transformDmlSQL = "insert into user_class_sink select mu.id as id, mu.name as name ,pc.class_name as class_name " +
                " from mysql_user_binlog as mu left join postgres_class_binlog as pc" +
                " on mu.class_no = pc.class_no";
        TableResult insertResult = tableEnv.executeSql(transformDmlSQL);
    }

    /** Builds the stream environment with exactly-once checkpointing configured. */
    private static StreamExecutionEnvironment createCheckpointedEnvironment() {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Trigger a checkpoint every 3 seconds.
        env.enableCheckpointing(3000);
        // EXACTLY_ONCE is the default mode; set explicitly for clarity.
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Require at least 1500 ms between the end of one checkpoint and the start of the next.
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(1500);
        // A checkpoint must complete within one minute or it is discarded.
        env.getCheckpointConfig().setCheckpointTimeout(60000);
        // At most one checkpoint may be in flight at a time.
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(1);
        // RETAIN_ON_CANCELLATION: keep checkpoint data after the job is cancelled so the
        // job can later be restored from a chosen checkpoint.
        // (DELETE_ON_CANCELLATION would remove it on cancel and keep it only on failure.)
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        env.setParallelism(1);
        return env;
    }

    /** DDL for the MySQL CDC source table carrying user rows. */
    private static String mysqlUserSourceDdl() {
        return "CREATE TABLE mysql_user_binlog ( " +
                " id INT NOT NULL, " +
                " name STRING, " +
                " class_no STRING, " +
                " primary key (id) not enforced " +
                ") WITH ( " +
                " 'connector' = 'mysql-cdc', " +
                " 'hostname' = '192.168.31.61', " +
                " 'port' = '3306', " +
                " 'username' = 'root', " +
                " 'password' = 'root', " +
                " 'database-name' = 'flink_sql', " +
                " 'table-name' = 'user' " +
                ")";
    }

    /** DDL for the PostgreSQL CDC source table carrying class rows. */
    private static String postgresClassSourceDdl() {
        return "CREATE TABLE postgres_class_binlog ( " +
                " id INT NOT NULL, " +
                " class_name STRING, " +
                " class_no STRING, " +
                " primary key (id) not enforced " +
                ") WITH ( " +
                " 'connector' = 'postgres-cdc', " +
                " 'hostname' = '192.168.31.61', " +
                " 'port' = '5432', " +
                " 'username' = 'postgres', " +
                " 'password' = 'root', " +
                " 'database-name' = 'flink', " +
                " 'schema-name' = 'public', " +
                " 'table-name' = 'classes', " +
                " 'decoding.plugin.name' = 'pgoutput' " +
//                " 'debezium.slot.name' = '***' " +
//                " , 'scan.startup.mode' = 'latest-offset' " + // default is snapshot + incremental
                ")";
    }

    /** DDL for the JDBC sink table receiving the joined user/class rows. */
    private static String mysqlSinkDdl() {
        return "CREATE TABLE user_class_sink ( " +
                " id INT, " +
                " name STRING, " +
                " class_name STRING, " +
                " primary key (id) not enforced " +
                ") WITH ( " +
                " 'connector' = 'jdbc', " +
                " 'driver' = 'com.mysql.cj.jdbc.Driver', " +
                " 'url' = 'jdbc:mysql://192.168.31.61:3306/flink?serverTimezone=Asia/Shanghai&useSSL=false'," +
                " 'username' = 'root', " +
                " 'password' = 'root', " +
                " 'table-name' = 'user' " +
                ")";
    }

}
