package com.atguigu.edu.app.dwd.db;

import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: extracts test-exam-question fact rows from the raw CDC
 * stream in Kafka topic {@code topic_db} and republishes them to a dedicated
 * Kafka topic as flat columns.
 *
 * <p>Pipeline: Kafka source (Maxwell-style CDC envelope with a {@code data}
 * map) → SQL filter on {@code `table` = 'test_exam_question'} → Kafka sink.
 * The job runs entirely through the Table API, so the {@code executeSql}
 * INSERT statement itself submits the job — no {@code env.execute()} call is
 * required.
 */
public class DwdTestExamQuestion {
    public static void main(String[] args) {
        // TODO 1: set up the stream execution environment.
        StreamExecutionEnvironment executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();

        // Single parallelism — presumably a local/dev setting; raise for production.
        executionEnv.setParallelism(1);

        StreamTableEnvironment streamTableEnv = StreamTableEnvironment.create(executionEnv);

        // TODO 2: checkpointing and state backend — intentionally disabled here;
        // re-enable (exactly-once, HDFS checkpoint storage) before production use.
        /*
        executionEnv.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        executionEnv.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        executionEnv.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        executionEnv.setStateBackend(new HashMapStateBackend());
        executionEnv.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3: register the raw CDC topic as a source table. The envelope
        // columns (`database`, `table`, `type`, `data`, `old`, ...) follow the
        // Maxwell change-record shape; `pt` adds a processing-time attribute.
        String sourceTopic = "topic_db";
        String consumerGroup = "dwd_test_exam_question";
        String sourceDdl =
                "CREATE TABLE topic_db (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `ts` bigint,\n" +
                "  `xid` bigint,\n" +
                "  `commit` string,\n" +
                "  `data` map<string,string>,\n" +
                "  `old` map<string, string>,\n" +
                "   pt AS PROCTIME() \n " +
                ") " + KafkaUtil.getKafkaDDL(sourceTopic, consumerGroup);
        streamTableEnv.executeSql(sourceDdl);

        // TODO 4: keep only change records of the test_exam_question table,
        // flattening the `data` map into one column per business field.
        String filterSql =
                "select\n" +
                "`data`['id'] id,\n" +
                "`data`['exam_id'] exam_id,\n" +
                "`data`['paper_id'] paper_id,\n" +
                "`data`['question_id'] question_id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['answer'] answer,\n" +
                "`data`['is_correct'] is_correct,\n" +
                "`data`['score'] score,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['update_time'] update_time,\n" +
                "`data`['deleted'] deleted,\n" +
                " ts\n" +
                "from topic_db\n" +
                "where `table` = 'test_exam_question'\n";
        Table examQuestionTable = streamTableEnv.sqlQuery(filterSql);
        streamTableEnv.createTemporaryView("test_exam_question", examQuestionTable);

        // TODO 5: declare the Kafka sink and submit the INSERT job. Column
        // order matches the view's select list, so `select *` lines up.
        // NOTE(review): the sink topic name "test_exam_question" matches the
        // source MySQL table name rather than carrying a dwd_ prefix like the
        // consumer group does — confirm this is the intended DWD topic.
        String sinkDdl =
                "create table sink_table(\n" +
                "id string,\n" +
                "exam_id string,\n" +
                "paper_id string,\n" +
                "question_id string,\n" +
                "user_id string,\n" +
                "answer string,\n" +
                "is_correct string,\n" +
                "score string,\n" +
                "create_time string,\n" +
                "update_time string,\n" +
                "deleted string ,\n" +
                " ts bigint \n" +
                ")" + KafkaUtil.getKafkaSinkDDL("test_exam_question");
        streamTableEnv.executeSql(sinkDdl);
        streamTableEnv.executeSql("insert into sink_table select * from test_exam_question");
    }
}
