package realtime.app.dwd.db;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import realtime.util.EnvUtil;
import realtime.util.HbaseUtil;
import realtime.util.KafkaUtil;

/**
 * DWD job: builds the dwd_exam_paper fact stream.
 *
 * <p>Pipeline: read CDC rows from the shared {@code topic_db} Kafka source,
 * keep only {@code insert} events of the {@code test_exam} table, enrich each
 * row with paper dimension attributes ({@code course_id}, {@code paper_title})
 * via a processing-time lookup join against the HBase table
 * {@code edu_realtime:dim_test_paper}, and emit the result to the
 * {@code dwd_exam_paper} upsert-Kafka topic keyed by {@code id}.
 */
public class DwdExamPaper {
    public static void main(String[] args) {
        // Execution environment with parallelism 4 (project convention via EnvUtil).
        StreamExecutionEnvironment env = EnvUtil.getSEE(4);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register the shared topic_db source; the argument is this job's Kafka consumer group.
        tableEnv.executeSql(KafkaUtil.getTopicDbDDL("dwd_exam_paper_group"));

        // Filter insert events of table test_exam. proc_time is kept because the
        // HBase lookup join below requires a processing-time attribute.
        Table testExam = tableEnv.sqlQuery("select" +
                " data['id'] id,\n" +
                " data['paper_id'] paper_id,\n" +
                " data['user_id'] user_id,\n" +
                " data['score'] score,\n" +
                " data['duration_sec'] duration_sec,\n" +
                " data['create_time'] create_time,\n" +
                " data['submit_time'] submit_time,\n" +
                " data['update_time'] update_time," +
                " proc_time," +
                " ts " +
                " from topic_db" +
                " where `table`='test_exam' and `type` = 'insert'");
        tableEnv.createTemporaryView("test_exam", testExam);

        // Dimension table backed by HBase. A column family is declared as a ROW type
        // whose nested field names are the HBase column qualifiers.
        String testPaper = "CREATE TABLE test_paper (" +
                " id string," +
                " info ROW<course_id string,paper_title string>," +
                " PRIMARY KEY (id) NOT ENFORCED" +
                ")" + HbaseUtil.getHbaseDDL("edu_realtime:dim_test_paper");

        tableEnv.executeSql(testPaper);

        // Processing-time lookup join: each exam row fetches its paper dimension
        // attributes from HBase as of t1.proc_time.
        // Fix: submit_time was extracted above but previously dropped here; it is
        // now carried through alongside create_time/update_time.
        Table result = tableEnv.sqlQuery("select " +
                " t1.id," +
                " t1.paper_id," +
                " t1.user_id," +
                " t1.score," +
                " t1.duration_sec," +
                " t1.create_time," +
                " t1.submit_time," +
                " t1.update_time," +
                " t1.ts," +
                " t2.info.course_id," +
                " t2.info.paper_title" +
                " from test_exam t1" +
                " join test_paper for system_time as of t1.proc_time as t2" +
                " on t1.paper_id = t2.id");
        tableEnv.createTemporaryView("result", result);

        // Upsert-Kafka sink keyed by id; column order must match the projection above
        // because the final INSERT maps columns by position (select *).
        tableEnv.executeSql("create table dwd_exam_paper(" +
                " id string,\n" +
                " paper_id string,\n" +
                " user_id string,\n" +
                " score string,\n" +
                " duration_sec string,\n" +
                " create_time string,\n" +
                " submit_time string,\n" +
                " update_time string,\n" +
                " ts string,\n" +
                " course_id string,\n" +
                " paper_title string," +
                " primary key(id) not enforced" +
                " )" + KafkaUtil.getUpsertKafkaDDL("dwd_exam_paper"));

        // executeSql submits the INSERT job; no env.execute() is needed for
        // pure Table-API pipelines.
        tableEnv.executeSql("insert into dwd_exam_paper select * from `result`");
    }
}
