package com.atguigu.app.dwd.db;

import com.atguigu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: widens exam submissions with their paper metadata.
 *
 * <p>Reads the raw CDC changelog from the {@code topic_db} Kafka topic, filters out
 * insert rows for the {@code test_exam} and {@code test_paper} business tables,
 * left-joins exams onto papers (so an exam is kept even if its paper has not been
 * seen yet), and writes the result to an upsert-kafka sink keyed by exam id so a
 * late-arriving paper row retracts/updates the earlier output.
 */
public class DwdTestExamPaper {

    /** Kafka topic carrying the raw CDC changelog for all business tables. */
    private static final String SOURCE_TOPIC = "topic_db";
    /** Consumer group for this job's source reader. */
    private static final String GROUP_ID = "dwd_test_exam_paper";
    /** Target DWD Kafka topic for the widened exam/paper rows. */
    private static final String SINK_TOPIC = "dwd_test_exam_paper";

    public static void main(String[] args) {
        // TODO 1 Environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 State backend / checkpointing — enable for production runs
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3 Read the raw CDC stream from topic_db
        createTopicDbSource(tableEnv);

        // TODO 4 Filter out the two business tables we need
        registerTestExamView(tableEnv);
        registerTestPaperView(tableEnv);

        // TODO 5 Left-join exams with their paper metadata
        registerResultView(tableEnv);

        // TODO 6 Write the joined rows out to Kafka
        writeToKafka(tableEnv);
    }

    /** Declares the Kafka source table over the raw CDC topic, adding a processing-time column. */
    private static void createTopicDbSource(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table topic_db(\n" +
                "    `table` string,\n" +
                "    `type` string,\n" +
                "    `ts` bigint,\n" +
                "    `data` map<string,string>," +
                "    `pt` as proctime() )" + KafkaUtil.getKafkaDDL(SOURCE_TOPIC, GROUP_ID));
    }

    /** Registers a view of insert rows from the exam table (test_exam), keeping the proctime column for the join. */
    private static void registerTestExamView(StreamTableEnvironment tableEnv) {
        Table testExamTable = tableEnv.sqlQuery("select\n" +
                "  `data`['id'] id,\n" +
                "  `data`['paper_id'] paper_id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['score'] score,\n" +
                "  `data`['duration_sec'] duration_sec,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['submit_time'] submit_time,\n" +
                "  `data`['update_time'] update_time,\n" +
                "  `data`['deleted'] deleted,\n" +
                "  `ts` ts,\n" +
                "  `pt` pt\n" +
                "from topic_db\n" +
                "where `table`='test_exam' and `type`='insert'");
        tableEnv.createTemporaryView("test_exam", testExamTable);
    }

    /** Registers a view of insert rows from the paper table (test_paper). */
    private static void registerTestPaperView(StreamTableEnvironment tableEnv) {
        Table testPaperTable = tableEnv.sqlQuery("select\n" +
                "  `data`['id'] id,\n" +
                "  `data`['paper_title'] paper_title,\n" +
                "  `data`['course_id'] course_id,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['update_time'] update_time,\n" +
                "  `data`['publisher_id'] publisher_id,\n" +
                "  `data`['deleted'] deleted,\n" +
                "  `ts` ts\n" +
                "from topic_db\n" +
                "where `table`='test_paper' and `type`='insert'");
        tableEnv.createTemporaryView("test_paper", testPaperTable);
    }

    /**
     * Left-joins exams onto papers by paper id; rt records the row's processing time
     * (epoch seconds) at join emission.
     */
    private static void registerResultView(StreamTableEnvironment tableEnv) {
        Table resultTable = tableEnv.sqlQuery("select\n" +
                "    te.id,\n" +
                "    te.paper_id,\n" +
                "    te.user_id,\n" +
                "    tp.course_id,\n" +
                "    te.score,\n" +
                "    te.duration_sec,\n" +
                "    te.submit_time,\n" +
                "    te.ts,\n" +
                "    te.pt,\n" +
                "    UNIX_TIMESTAMP() rt\n" +
                "from test_exam te \n" +
                "left join test_paper tp on te.paper_id=tp.id");
        tableEnv.createTemporaryView("result_table", resultTable);
    }

    /**
     * Declares the upsert-kafka sink (keyed by exam id so left-join retractions become
     * updates) and submits the insert. Columns are listed explicitly rather than via
     * {@code select *} so the statement does not silently break if the projection or
     * sink schema order ever drifts.
     */
    private static void writeToKafka(StreamTableEnvironment tableEnv) {
        tableEnv.executeSql("create table test_exam_paper(\n" +
                "  id string,\n" +
                "  paper_id string,\n" +
                "  user_id string,\n" +
                "  course_id string,\n" +
                "  score string,\n" +
                "  duration_sec string,\n" +
                "  submit_time string,\n" +
                "  ts bigint,\n" +
                "  pt timestamp_ltz(3),\n" +
                "  rt bigint,\n" +
                "  primary key(id) not enforced \n" +
                ")" + KafkaUtil.getUpsertKafkaSinkDDL(SINK_TOPIC));
        tableEnv.executeSql("insert into test_exam_paper " +
                "select id, paper_id, user_id, course_id, score, duration_sec, " +
                "submit_time, ts, pt, rt from result_table");
    }
}
