package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author:ray
 * @time:2022/10/15 10:51
 * @description:
 **/
/**
 * DWD-layer job: extracts exam question-answer detail records.
 *
 * <p>Reads Maxwell change-log data from the {@code topic_db} Kafka topic, filters rows
 * belonging to the {@code test_exam_question} table, and writes the flattened detail
 * records to the {@code dwd_test_exam_question_detail} upsert-Kafka topic keyed by id.
 */
public class DwdTestExamQuestionDetail {
    public static void main(String[] args) {
        // TODO 1. Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint configuration (disabled for local testing; enable in production)
        /*
        // 2.1 Enable checkpointing with exactly-once semantics
        env.enableCheckpointing(5000L, CheckpointingMode.EXACTLY_ONCE);
        // 2.2 Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(60000L);
        // 2.3 Retain checkpoints after job cancellation
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);
        // 2.4 Minimum pause between two checkpoints
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(2000L);
        // 2.5 Restart strategy
        env.setRestartStrategy(RestartStrategies.failureRateRestart(3, Time.days(30), Time.seconds(3)));
        // 2.6 State backend and checkpoint storage
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        */

        // TODO 3. Register the topic_db source table (Maxwell CDC stream);
        // the argument is used as the Kafka consumer group id.
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("test_exam_question_detail"));

        // TODO 4. Filter out exam answer records.
        // Sample payload:
        // {"database":"edu","table":"test_exam_question","type":"insert","ts":1665716562,"xid":81776,"xoffset":4,"data":{"id":55329,"exam_id":3727,"paper_id":211,"question_id":1800,"user_id":1262,"answer":"5880,5881,5882","is_correct":"0","score":0.00,"create_time":"2022-10-14 11:02:42","update_time":null,"deleted":"0"}}
        Table testExamQuestion = tableEnv.sqlQuery("select\n" +
                " data['id'] id,\n" +
                " data['exam_id'] exam_id,\n" +
                " data['paper_id'] paper_id,\n" +
                " data['question_id'] question_id,\n" +
                " data['user_id'] user_id,\n" +
                " data['is_correct'] is_correct,\n" +
                " data['score'] score,\n" +
                " ts\n" +
                " from topic_db\n" +
                " where `table` = 'test_exam_question'"
        );

        // TODO 5. Write the exam question detail records to the Kafka sink topic.
        // NOTE(review): the registered table name previously was "dwd_test_question_detail",
        // inconsistent with the sink topic; both now use "dwd_test_exam_question_detail".
        tableEnv.executeSql("create table dwd_test_exam_question_detail(\n" +
                " id string,\n" +
                " exam_id string,\n" +
                " paper_id string,\n" +
                " question_id string,\n" +
                " user_id string,\n" +
                " is_correct string,\n" +
                " score string,\n" +
                " ts string," +
                " primary key(id) not enforced \n" +
                ")" + MyKafkaUtil.getUpsertKafkaDDL("dwd_test_exam_question_detail"));

        testExamQuestion.executeInsert("dwd_test_exam_question_detail");
    }
}
