package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: reads the Maxwell CDC stream from the ODS {@code topic_db}
 * Kafka topic, keeps only change records of the {@code test_exam} table, and sinks
 * the selected columns into the {@code dwd_test_exam_detail} upsert-Kafka table
 * (keyed by exam record id).
 *
 * @author:ray
 * @time:2022/10/15 13:52
 * @description: test exam detail fact extraction
 **/
public class DwdTestExamDetail {
    public static void main(String[] args) {
        // TODO 1. Execution environment setup
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        executionEnv.setParallelism(4);
        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(executionEnv);

        // TODO 2. Checkpoint configuration (intentionally disabled for local development)
        /*
        // 2.1 enable checkpointing: 5s interval, exactly-once semantics
        executionEnv.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 checkpoint timeout
        executionEnv.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 retain externalized checkpoints after job cancellation
        executionEnv.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 minimum pause between two checkpoints
        executionEnv.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.5 restart strategy: at most 3 failures within 30 days, 3s delay
        executionEnv.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // 2.6 state backend and checkpoint storage location
        executionEnv.setStateBackend(new HashMapStateBackend());
        executionEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        */

        // TODO 3. Register the ODS source table over Kafka topic_db
        streamTableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_test_exam_detail_group"));

        // Sample CDC record for reference:
        // {"database":"edu","table":"test_exam","type":"insert","ts":1665716562,"xid":81914,"commit":true,"data":{"id":3731,"paper_id":177,"user_id":882,"score":35.00,"duration_sec":1115,"create_time":"2022-10-14 11:02:42","submit_time":"2022-10-14 11:02:42","update_time":null,"deleted":"0"}}

        // TODO 4. Keep only change records of the test_exam table
        String selectSql =
                "select data['id'] id, "
                        + "data['paper_id'] paper_id, "
                        + "data['user_id'] user_id, "
                        + "data['score'] score, "
                        + "data['duration_sec'] duration_sec, "
                        + "ts "
                        + "from topic_db "
                        + " where `table` = 'test_exam' ";
        Table filteredTestExam = streamTableEnv.sqlQuery(selectSql);

        // TODO 5. Create the dwd_test_exam_detail upsert-Kafka sink and write into it
        String sinkDdl =
                "create table dwd_test_exam_detail"
                        + "( id string, "
                        + "  paper_id string, "
                        + " user_id string, "
                        + " score string, "
                        + " duration_sec string, "
                        + " ts string,"
                        + " primary key(id) not enforced )"
                        + MyKafkaUtil.getUpsertKafkaDDL("dwd_test_exam_detail");
        streamTableEnv.executeSql(sinkDdl);

        filteredTestExam.executeInsert("dwd_test_exam_detail");

    }
}
