package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: extracts user registration events from the ODS topic_db
 * stream and writes them to the dwd_user_register Kafka topic.
 *
 * @author ray
 * @since 2022/10/14 23:07
 **/
public class DwdUserRegister {
    public static void main(String[] args) {
        //TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        //TODO 2. Checkpoint settings (disabled for local development)
//        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
//        env.getCheckpointConfig().setCheckpointTimeout(60*1000L);
//        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
//        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
//        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30),Time.seconds(3)));
//        env.setStateBackend(new HashMapStateBackend());
//        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop101:8020/edu/ck");
        // FIX: the HDFS user is read from "HADOOP_USER_NAME", not "HADOOP_USER".
//        System.setProperty("HADOOP_USER_NAME","atguigu");

        //TODO 3. Read the raw CDC stream from topic_db and register it as a Flink SQL table
        tableEnv.executeSql(
                MyKafkaUtil.getTopicDbDDL("dwd_user_register_group")
        );

        //TODO 4. Filter user_info inserts (each insert = one registration event)
        Table userInfo = tableEnv.sqlQuery(
                "select\n" +
                        "data['id'] user_id,\n" +
                        "data['create_time'] create_time,\n" +
                        "ts\n" +
                        "from topic_db\n" +
                        "where `table` = 'user_info'\n" +
                        "and `type` = 'insert'\n"
        );
        // FIX: the INSERT statement below selects from `user_info`; the view was
        // previously registered as "userInfo", which made the job fail at runtime
        // with "Object 'user_info' not found". Register it under the queried name.
        tableEnv.createTemporaryView("user_info", userInfo);

        //TODO 5. Create the dwd_user_register sink table (upsert-kafka, keyed by user_id)
        tableEnv.executeSql(
                "create table `dwd_user_register`(\n" +
                        "`user_id` string,\n" +
                        "`date_id` string,\n" +
                        "`create_time` string,\n" +
                        "`ts` string,\n" +
                        "primary key(`user_id`) not enforced\n" +
                        ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_user_register")
        );

        //TODO 6. Write the registration events to Kafka.
        // executeSql on an INSERT submits its own async job, so no env.execute() is needed.
        TableResult tableResult = tableEnv.executeSql(
                "insert into dwd_user_register\n" +
                        "select \n" +
                        "user_id,\n" +
                        "date_format(create_time, 'yyyy-MM-dd') date_id,\n" +
                        "create_time,\n" +
                        "ts\n" +
                        "from user_info"
        );
    }
}
