package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.MyKafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author zhangsan
 * @Date 2022/10/19 10:51
 * @Description //TODO DWD layer, exam domain: exam-paper-course facts (exam attempt details joined with paper/course)
 */
public class DwdTestPaperCourseTest {

    public static void main(String[] args) throws Exception {
        // TODO 1. Basic environment setup
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpoint settings
        // NOTE(review): checkpointing is not configured here — confirm whether it is
        // intentionally omitted (e.g. configured cluster-side) before going to production.

        // TODO 3. Read the CDC stream from the Kafka topic_db topic
        tableEnv.executeSql(MyKafkaUtil.getTopicDbDDL("dwd_test_paper_course_test_group"));

        // TODO 4. Filter out rows belonging to the exam table (test_exam), dropping deleted rows
        String examSql =
                "select\n " +
                "`data`['id'] id,\n" +
                "`data`['paper_id'] paper_id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['score'] score,\n" +
                "`data`['duration_sec'] duration_sec,\n" +
                "`data`['create_time'] ts \n" +
                "from topic_db \n" +
                "where `table` = 'test_exam' \n" +
                "and `data`['deleted']='0'";
        Table examTable = tableEnv.sqlQuery(examSql);
        tableEnv.createTemporaryView("test_exam_temp", examTable);

        // TODO 5. Filter out rows belonging to the paper table (test_paper), dropping deleted rows
        String paperSql =
                "select \n" +
                "`data`['id'] id,\n" +
                "`data`['course_id'] course_id\n" +
                "from topic_db\n" +
                "where `table` = 'test_paper'\n" +
                "and `data`['deleted']='0'";
        Table paperTable = tableEnv.sqlQuery(paperSql);
        tableEnv.createTemporaryView("test_paper_temp", paperTable);

        // TODO 6. Left-join the two views: test_exam drives, test_paper enriches with course_id
        String joinSql =
                "select \n" +
                "te.id,\n" +
                "te.paper_id,\n" +
                "tp.course_id,\n" +
                "te.user_id,\n" +
                "te.score,\n" +
                "te.duration_sec,\n" +
                "te.ts,\n" +
                "current_row_timestamp()   row_op_ts\n" +
                "from test_exam_temp as te\n" +
                "left join test_paper_temp tp on te.paper_id = tp.id";
        Table joinedTable = tableEnv.sqlQuery(joinSql);
        tableEnv.createTemporaryView("res_table", joinedTable);

        // TODO 7. Sink the joined changelog to the corresponding Kafka topic.
        // upsert-kafka is required (left join emits retractions); PRIMARY KEY picks the upsert key.
        String sinkDdl =
                "CREATE TABLE dwd_test_paper_course_test (\n" +
                "  id string,\n" +
                "  paper_id string,\n" +
                "  course_id string,\n" +
                "  user_id  string,\n" +
                "  score  string,\n" +
                "  duration_sec string,\n" +
                "  ts string,\n" +
                "row_op_ts timestamp_ltz(3),\n" +
                "  PRIMARY KEY (id) NOT ENFORCED\n" +
                ") " + MyKafkaUtil.getUpsertKafkaDDL("dwd_test_paper_course_test");
        tableEnv.executeSql(sinkDdl);

        //tableEnv.executeSql("select * from res_table").print();
        tableEnv.executeSql("insert into dwd_test_paper_course_test select * from res_table");
    }
}
