package com.fourth.app.dwd.db;

import com.fourth.utils.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import java.time.ZoneId;

/**
 * DWD-layer Flink job: pulls business CDC events from the {@code topic_db} Kafka topic,
 * keeps only {@code insert} events on the {@code user_info} table (i.e. user registrations),
 * flattens the {@code data} map into columns, and writes the result to the
 * {@code dwd_user_register} Kafka topic via a Kafka sink table.
 *
 * <p>No {@code env.execute()} is needed: the terminal {@code executeSql("insert into ...")}
 * submits the job by itself.
 */
public class DwdUserRegister {
    public static void main(String[] args) throws Exception {
        //TODO 1. Build the streaming environment and its SQL table wrapper.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);
        // Time functions in SQL resolve against Beijing time (UTC+8).
        tableEnv.getConfig().setLocalTimeZone(ZoneId.of("GMT+8"));

        //TODO 2. State backend / checkpointing — disabled for local runs; enable in production.
        // env.enableCheckpointing(3000L, CheckpointingMode.EXACTLY_ONCE);
        // env.getCheckpointConfig().setCheckpointTimeout(60 * 1000L);
        // env.getCheckpointConfig().enableExternalizedCheckpoints(
        //         CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION
        // );
        // env.getCheckpointConfig().setMinPauseBetweenCheckpoints(3000L);
        // env.setRestartStrategy(
        //         RestartStrategies.failureRateRestart(3, Time.days(1L), Time.minutes(3L))
        // );
        // env.setStateBackend(new HashMapStateBackend());
        // env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/ck");
        // System.setProperty("HADOOP_USER_NAME", "atguigu");

        //TODO 3. Declare the raw CDC topic as a dynamic table over Kafka.
        // `data` carries the row image as key->value pairs; `table`/`type` identify the event.
        final String sourceDdl =
                "CREATE TABLE topic_db ( "
                        + "  `database` STRING,   `table` STRING,   `type` STRING, "
                        + "  `data` Map<STRING,STRING>,   `ts` STRING "
                        + ") " + MyKafkaUtil.getKafkaDDL("topic_db", "user_register");
        tableEnv.executeSql(sourceDdl);

        //TODO 4. Keep only user_info inserts (registrations) and flatten the data map.
        Table userInfoTable = tableEnv.sqlQuery(
                "select "
                        + "data['id'] `id`, data['login_name'] `login_name`, "
                        + "data['nick_name'] `nick_name`, data['real_name'] `real_name`, "
                        + "data['phone_num'] `phone_num`, data['email'] `email`, "
                        + "data['user_level'] `user_level`, data['birthday'] `birthday`, "
                        + "data['gender'] `gender`, data['create_time'] `create_time`, "
                        + "data['operate_time'] `operate_time`, data['status'] `status` "
                        + "from topic_db "
                        + "where `table` = 'user_info' "
                        + "and `type` = 'insert'");
        tableEnv.createTemporaryView("user_info", userInfoTable);

        // Debugging aids, left disabled:
        // tableEnv.toAppendStream(userInfoTable, Row.class).print();
        // env.execute();

        //TODO 5. Declare the DWD sink table backed by the dwd_user_register Kafka topic.
        final String sinkDdl =
                "CREATE TABLE dwd_user_register( "
                        + "`id` STRING, `login_name` STRING, `nick_name` STRING, "
                        + "`real_name` STRING, `phone_num` STRING, `email` STRING, "
                        + "`user_level` STRING, `birthday` STRING, `gender` STRING, "
                        + "`create_time` STRING, `operate_time` STRING, `status` STRING "
                        + ") " + MyKafkaUtil.getInsertKafkaDDL("dwd_user_register");
        tableEnv.executeSql(sinkDdl);

        //TODO 6. Stream the filtered registrations into the sink; timestamps are
        // reduced to a yyyy-MM-dd date string on the way out.
        tableEnv.executeSql(
                "insert into dwd_user_register "
                        + "select "
                        + "id , login_name , nick_name , real_name , phone_num , "
                        + "email , user_level , birthday , gender , "
                        + "date_format(create_time, 'yyyy-MM-dd') create_time , "
                        + "date_format(operate_time, 'yyyy-MM-dd') operate_time , "
                        + "status  "
                        + "from user_info");
    }
}
