package app.dwd;

import app.BaseSQLApp;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import utils.SQLUtil;

/**
 * DWD-layer job: builds the course-exam wide record by joining exam attempts
 * ({@code test_exam}) with their paper metadata ({@code test_paper}) from the
 * ODS Kafka topic, then writes the result to the {@code dwd_course_exam} topic.
 *
 * @author Mingyang He
 * @email 861349663@qq.com
 * @date 2022/10/25
 * @time 14:39
 */
public class Dwd_14_DwdCourseExam extends BaseSQLApp {

    /** Used both as the job name and as the Kafka consumer group id. */
    private static final String APP_NAME = "Dwd_14_DwdCourseExam";

    public static void main(String[] args) {
        // NOTE(review): 2014 / 2 are presumably the web-UI port and parallelism
        // expected by BaseSQLApp.init — confirm against the base class.
        new Dwd_14_DwdCourseExam().init(2014, 2, APP_NAME);
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {

        // 1. Register the Kafka ods_db source table (consumer group = APP_NAME).
        readOdsDb(tEnv, APP_NAME);

        // 2. Project the test_exam columns out of the ods_db change stream.
        Table testExam = tEnv.sqlQuery(
                "select " +
                " `data`['user_id'] user_id, " +
                " `data`['paper_id'] paper_id, " +
                " `data`['score'] score, " +
                " `data`['duration_sec'] duration_sec, " +
                " ts " +
                " from ods_db " +
                " where `database`='edu' " +
                " and `table`='test_exam' ");
        tEnv.createTemporaryView("test_exam", testExam);

        // 3. Project the test_paper columns out of the ods_db change stream.
        Table paperExam = tEnv.sqlQuery(
                "select " +
                " `data`['id'] id, " +
                " `data`['course_id'] course_id, " +
                " `data`['paper_title'] paper_title " +
                " from ods_db " +
                " where `database`='edu' " +
                " and `table`='test_paper' ");
        tEnv.createTemporaryView("test_paper", paperExam);

        // 4. Enrich each exam attempt with its paper's course and title.
        Table result = tEnv.sqlQuery(
                "select " +
                "te.user_id,  " +
                "te.score,  " +
                "te.duration_sec,  " +
                "tp.course_id,  " +
                "tp.paper_title  " +
                "from test_exam te " +
                "join test_paper tp on te.paper_id=tp.id ");

        // 5. Declare the Kafka sink. BUGFIX: the column list previously was
        // (id, user_id, paper_id, score, duration_sec), which does not match the
        // join projection above — executeInsert would fail validation (or write
        // values under wrong column names). The sink schema must mirror the
        // projection: user_id, score, duration_sec, course_id, paper_title.
        tEnv.executeSql(
                "create table dwd_course_exam(" +
                "user_id string,  " +
                "score string,  " +
                "duration_sec string,  " +
                "course_id string,  " +
                "paper_title string " +
                ")" + SQLUtil.getKafkaSink("dwd_course_exam"));

        // 6. Stream the joined rows into the sink topic.
        result.executeInsert("dwd_course_exam");
    }
}
