package com.wudl.hudi.sink;

//import org.apache.flink.api.common.restartstrategy.RestartStrategies;
//import org.apache.flink.runtime.state.filesystem.FsStateBackend;
//import org.apache.flink.streaming.api.CheckpointingMode;
//import org.apache.flink.streaming.api.environment.CheckpointConfig;
//import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
//import org.apache.flink.table.api.EnvironmentSettings;
//import org.apache.flink.table.api.Table;
//import org.apache.flink.table.api.TableResult;
//import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
//
//import static org.apache.flink.table.api.Expressions.$;

import com.wudl.hudi.utils.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;

import static org.apache.flink.table.api.Expressions.$;

/**
 * Flink SQL job that joins two MySQL CDC source tables ({@code Flink_cdc} and
 * {@code Flink_cdd}) and writes the joined stream to both a Hudi
 * MERGE_ON_READ table on HDFS and a Kafka topic (debezium-json format).
 *
 * @author wudl
 * @version 1.0
 * @since 2022-02-19
 */

public class MysqlJoinMysqlHuDi {

    /**
     * Entry point. Pipeline:
     * <ol>
     *   <li>Create two MySQL-CDC source tables ({@code source_mysql}, {@code source_mysql_Flink_cdd}).</li>
     *   <li>Inner-join them on {@code id} and register the result as view {@code viewFlinkCdc}.</li>
     *   <li>Create a Hudi (MERGE_ON_READ) sink and a Kafka (debezium-json) sink.</li>
     *   <li>Insert the joined view into both sinks.</li>
     * </ol>
     * {@code executeSql} submits each INSERT as its own job, so no explicit
     * {@code env.execute()} call is required.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink environment fails to initialize
     */
    public static void main(String[] args) throws Exception {
        // 1. Stream environment + Table API bridge. Parallelism 1 keeps the
        //    demo deterministic and matches 'write.tasks' = '1' in the Hudi DDL.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // 1.1 Checkpointing is mandatory: the Hudi sink only commits data on
        //     checkpoint completion.
        env.enableCheckpointing(5000L);
        env.getCheckpointConfig().setCheckpointTimeout(10000L);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);
        // Retain the last checkpoint when the job is cancelled normally, so the
        // job can be resumed from it.
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // Restart strategy: up to 3 attempts, 5 s apart.
        env.setRestartStrategy(RestartStrategies.fixedDelayRestart(3, 5000L));
        // Checkpoint state backend on HDFS.
        env.setStateBackend(new FsStateBackend("hdfs://192.168.1.161:8020/flink-hudi/ck"));
        // User name used for HDFS access.
        System.setProperty("HADOOP_USER_NAME", "root");

        // 2. CDC source table for wudldb.Flink_cdc.
        //    NOTE(review): credentials are hard-coded; move to configuration.
        tableEnv.executeSql(
                "CREATE TABLE IF NOT EXISTS   source_mysql ( " +
                        "  id BIGINT  primary key NOT ENFORCED ," +
                        "  name string," +
                        "  age int ," +
                        "  birthday TIMESTAMP(3)," +
                        "  ts TIMESTAMP(3)" +
                        ") WITH ( " +
                        " 'connector' = 'mysql-cdc', " +
                        " 'hostname' = '192.168.1.162', " +
                        " 'port' = '3306', " +
                        " 'username' = 'root', " +
                        " 'password' = '123456', " +
                        " 'server-time-zone' = 'Asia/Shanghai', " +
                        " 'scan.startup.mode' = 'initial', " +
                        " 'database-name' = 'wudldb', " +
                        " 'table-name' = 'Flink_cdc' " +
                        " )");

        // CDC source table for wudldb.Flink_cdd (phone/address side of the join).
        tableEnv.executeSql(
                "CREATE TABLE IF NOT EXISTS   source_mysql_Flink_cdd ( " +
                        "  id BIGINT  primary key NOT ENFORCED ," +
                        "  phone string," +
                        "  address string ," +
                        "  ts TIMESTAMP(3)" +
                        ") WITH ( " +
                        " 'connector' = 'mysql-cdc', " +
                        " 'hostname' = '192.168.1.162', " +
                        " 'port' = '3306', " +
                        " 'username' = 'root', " +
                        " 'password' = '123456', " +
                        " 'server-time-zone' = 'Asia/Shanghai', " +
                        " 'scan.startup.mode' = 'initial', " +
                        " 'database-name' = 'wudldb', " +
                        " 'table-name' = 'Flink_cdd' " +
                        " )");

        // 3. Join the two sources on id. TIMESTAMP columns are cast to STRING
        //    here; sinks cast back where needed. The result is registered as a
        //    view so both sinks read the SAME join definition.
        String joinSql = "SELECT b.id id,b.name name,b.age age,CAST(b.birthday as STRING) birthday ,a.phone phone,a.address address,CAST(a.ts AS STRING) ts FROM source_mysql_Flink_cdd  a INNER JOIN   source_mysql b ON a.id = b.id";
        Table tableMysqlJoin = tableEnv.sqlQuery(joinSql);
        tableEnv.createTemporaryView("viewFlinkCdc", tableMysqlJoin);

        // 4. Hudi sink: MERGE_ON_READ table with async compaction every commit,
        //    upserting on 'id' and de-duplicating on 'ts', synced to Hive.
        tableEnv.executeSql("CREATE TABLE myslqjoinmysqlhudiSink(\n" +
                "id bigint ,\n" +
                "name string,\n" +
                "age int,\n" +
                "birthday STRING,\n" +
                "phone STRING,\n" +
                "address STRING,\n" +
                "ts TIMESTAMP(3),\n" +
                "primary key(id) not enforced\n" +
                ")\n" +
                "with(\n" +
                "'connector'='hudi',\n" +
                "'path'= 'hdfs://192.168.1.161:8020/myslqjoinmysqlhudiSink', \n" +
                "'table.type'= 'MERGE_ON_READ',\n" +
                "'hoodie.datasource.write.recordkey.field'= 'id', \n" +
                "'write.precombine.field'= 'ts',\n" +
                "'write.tasks'= '1',\n" +
                "'write.rate.limit'= '2000', \n" +
                "'compaction.tasks'= '1', \n" +
                "'compaction.async.enabled'= 'true',\n" +
                "'compaction.trigger.strategy'= 'num_commits',\n" +
                "'compaction.delta_commits'= '1',\n" +
                "'changelog.enabled'= 'true',\n" +
                "'read.streaming.enabled'= 'true',\n" +
                "'read.streaming.check-interval'= '3',\n" +
                "'hive_sync.enable'= 'true',\n" +
                "'hive_sync.mode'= 'hms',\n" +
                "'hive_sync.metastore.uris'= 'thrift://node02.com:9083',\n" +
                "'hive_sync.jdbc_url'= 'jdbc:hive2://node02.com:10000',\n" +
                "'hive_sync.table'= 'myslqjoinmysqlhudiSink',\n" +
                "'hive_sync.db'= 'db_hive',\n" +
                "'hive_sync.username'= 'root',\n" +
                "'hive_sync.password'= '123456',\n" +
                "'hive_sync.support_timestamp'= 'true'\n" +
                ")");

        // 5. Kafka sink in debezium-json format.
        //    NOTE(review): 'scan.startup.mode' is a *source* option and is
        //    ignored when this table is only written to — confirm and remove.
        tableEnv.executeSql(
                "CREATE TABLE flinkCdc_kafka_Sink (" +
                        "  id BIGINT NOT NULL," +
                        "  name STRING," +
                        "  age INT," +
                        "  birthday STRING," +
                        "  phone STRING," +
                        "  address STRING," +
                        "  ts STRING" +
                        ") WITH (" +
                        "  'connector' = 'kafka'," +
                        "  'topic' = 'sinktest'," +
                        "  'scan.startup.mode' = 'earliest-offset', "+
                        "  'properties.bootstrap.servers' = '192.168.1.161:6667'," +
                        "  'format' = 'debezium-json'," +
                        "    'debezium-json.ignore-parse-errors'='true' " +
                        ")"
        );

        // 6. Write the joined view into the Hudi table. The Hudi sink declares
        //    ts as TIMESTAMP(3), so cast the STRING back to TIMESTAMP.
        tableEnv.executeSql(
                "INSERT INTO myslqjoinmysqlhudiSink " +
                        "SELECT id,name,age,birthday,phone,address,CAST(ts as TIMESTAMP) ts FROM viewFlinkCdc"
        );

        // Debug output: print both schemas for a quick sanity check.
        tableEnv.sqlQuery("select * from flinkCdc_kafka_Sink").printSchema();
        tableEnv.sqlQuery("select * from viewFlinkCdc").printSchema();

        // Write the same joined view into Kafka. Reading from viewFlinkCdc
        // (instead of repeating the join SQL) keeps both sinks consistent; the
        // view's columns already match the Kafka sink schema exactly.
        tableEnv.executeSql("insert into flinkCdc_kafka_Sink SELECT id,name,age,birthday,phone,address,ts FROM viewFlinkCdc");

        System.out.println("--------------------------");
    }
}
