package com.atliuzu.app.dwd.db;

import com.atliuzu.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneId;

/**
 * @author w
 * @create 2022-08-19-11:31
 */
/**
 * DWD-layer job: extracts user-registration records from the raw CDC topic
 * ({@code topic_db}) and writes them to the {@code dwd_user_register} Kafka topic.
 *
 * <p>Pipeline: Kafka source DDL -> filter {@code user_info} inserts -> add
 * {@code date_id} -> insert into Kafka sink.
 */
public class DwdUserRegister {
    public static void main(String[] args) {
        // TODO 1. Create the stream/table execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 matches the (single-partition) dev setup; raise in production.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        //tableEnv.getConfig().setLocalTimeZone(ZoneId.of("GMT+8"));

        // TODO 2. State backend / checkpointing — intentionally disabled for local runs;
        // re-enable before deploying so the job is fault-tolerant.
         /*env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        env.getCheckpointConfig().enableExternalizedCheckpoints(
                CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        );
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        env.setRestartStrategy(
                RestartStrategies.failureRateRestart(3, Time.days(1L), Time.minutes(3L))
        );
        env.setStateBackend(new HashMapStateBackend ());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");*/

        // TODO 3. Declare the Kafka source table over topic_db (Maxwell/CDC envelope).
        // `pt` is a processing-time attribute (type TIMESTAMP_LTZ(3) NOT NULL).
        tableEnv.executeSql("CREATE TABLE topic_db ( " +
                "  `database` STRING, " +
                "  `table` STRING, " +
                "  `type` STRING, " +
                "  `data` MAP<STRING, STRING>, " +
                "  `ts` BIGINT, " +
                "  `pt` AS PROCTIME() " +
                ") " + MyKafkaUtil.getKafkaDDL("topic_db", "user_register_06"));

        // TODO 4. Keep only freshly inserted user_info rows (registrations).
        Table userInfo = tableEnv.sqlQuery("select " +
                "  data['id'] user_id, " +
                "  data['create_time'] create_time, " +
                "  `ts`, " +
                "  `pt` " +
                "  from topic_db " +
                "  where `table` = 'user_info' " +
                "  and `type` = 'insert'");
        tableEnv.createTemporaryView("user_info", userInfo);

        // Debug only: printing an unbounded table blocks the main thread forever,
        // so the sink DDL and INSERT below would never execute. Keep disabled.
        //userInfo.execute().print();

        // TODO 5. Declare the Kafka sink table dwd_user_register.
        // BUGFIX: original DDL had a misplaced comma (`ts`, BigInt) making it invalid SQL,
        // and typed `pt` as BIGINT while the source emits TIMESTAMP_LTZ(3) from PROCTIME()
        // — the insert would fail on type mismatch. NOTE(review): confirm the downstream
        // consumer expects `pt` serialized as a timestamp.
        tableEnv.executeSql("create table dwd_user_register( " +
                "    `user_id` STRING, " +
                "    `create_time` STRING, " +
                "    `date_id` STRING, " +
                "    `ts` BIGINT, " +
                "    `pt` TIMESTAMP_LTZ(3) " +
                ") " + MyKafkaUtil.getInsertKafkaDDL("dwd_user_register"));

        // TODO 6. Stream the filtered registrations into the sink topic,
        // deriving date_id (yyyy-MM-dd) from create_time.
        tableEnv.executeSql(" insert into dwd_user_register " +
                "    select " +
                "      user_id, " +
                "      create_time, " +
                "      date_format(create_time,'yyyy-MM-dd') date_id, " +
                "      `ts`, " +
                "      `pt` " +
                "      from user_info");
    }
}
