package cn.dmrliu.edu.realtime.app.dwd.db;

import cn.dmrliu.edu.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Tool domain: user exam transaction fact table.
 *
 * <p>Reads CDC change records from the Kafka {@code db} topic, filters out the
 * {@code test_exam} (exam header) and {@code test_exam_question} (per-question
 * answer) tables, joins each answer row with its exam header, and writes the
 * result to the {@code edu_dwd_tool_exam} upsert-kafka topic keyed by the
 * answer-row id.
 */
public class DwdToolExam {
    public static void main(String[] args) {
        // TODO 1. Prepare the basic environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint settings: enable periodic checkpoints so Kafka
        // source offsets are committed consistently with the sink output and
        // the job can recover after a failure.
        env.enableCheckpointing(3000L);

        // The regular (unbounded) join below keeps BOTH inputs in Flink state
        // indefinitely; cap idle state retention so rows that no longer get
        // matches are eventually evicted instead of growing state forever.
        // NOTE(review): tune the TTL to the maximum expected gap between an
        // exam header and its question rows.
        tableEnv.getConfig().getConfiguration().setString("table.exec.state.ttl", "10 s");

        // TODO 3. Read from the Kafka db topic and create the dynamic source table
        String group = "edu_dwd_tool_exam_group";
        tableEnv.executeSql(KafkaUtil.getEduDbDDL(group));

        // TODO 4. Filter the required tables: test_exam and test_exam_question
        Table testExam = tableEnv.sqlQuery("" +
                "select \n" +
                "  `data`['id'] id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['paper_id'] paper_id,\n" +
                "  `data`['duration_sec'] duration_sec,\n" +
                "  `data`['submit_time'] submit_time,\n" +
                "  ts,\n" +
                "  proc_time\n" +
                "from edu_db\n" +
                "where `table` = 'test_exam'");
        tableEnv.createTemporaryView("test_exam", testExam);

        Table testExamQuestion = tableEnv.sqlQuery("" +
                "select \n" +
                "  `data`['id'] id,\n" +
                "  `data`['exam_id'] exam_id,\n" +
                "  `data`['paper_id'] paper_id,\n" +
                "  `data`['question_id'] question_id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['is_correct'] is_correct,\n" +
                "  `data`['score'] score,\n" +
                "  ts,\n" +
                "  proc_time\n" +
                "from edu_db\n" +
                "where `table` = 'test_exam_question'");
        tableEnv.createTemporaryView("test_exam_question", testExamQuestion);

        // TODO 5. Join each question row with its exam header (inner join on
        // exam_id, so question rows arriving before their header are held in
        // state until the header appears)
        Table joinTable = tableEnv.sqlQuery("" +
                "select\n" +
                "  eq.id,\n" +
                "  eq.exam_id,\n" +
                "  eq.paper_id,\n" +
                "  eq.question_id,\n" +
                "  eq.user_id,\n" +
                "  eq.is_correct,\n" +
                "  eq.score,\n" +
                "  ex.duration_sec,\n" +
                "  ex.submit_time,\n" +
                "  eq.ts\n" +
                "from test_exam_question eq\n" +
                "join test_exam ex\n" +
                "on eq.exam_id = ex.id");
        tableEnv.createTemporaryView("join_table", joinTable);

        // TODO 6. Write to Kafka via the upsert-kafka connector; the primary
        // key (id) becomes the Kafka record key so retractions from the join
        // are compacted per answer row.
        tableEnv.executeSql("" +
                "create table dwd_tool_exam(\n" +
                "  id string,\n" +
                "  exam_id string,\n" +
                "  paper_id string,\n" +
                "  question_id string,\n" +
                "  user_id string,\n" +
                "  is_correct string,\n" +
                "  score string,\n" +
                "  duration_sec string,\n" +
                "  submit_time string,\n" +
                "  ts string,\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ")"
                + KafkaUtil.getUpsertKafkaDDL("edu_dwd_tool_exam")
        );
        tableEnv.executeSql("insert into dwd_tool_exam select * from join_table");

    }
}
