package app.dwd;

import app.BaseSQLApp;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import utils.SQLUtil;

/**
 * @author Mingyang He
 * @email 861349663@qq.com
 * @date 2022/10/25
 * @time 11:12
 */
public class Dwd_13_DwdPaperExam extends BaseSQLApp {

    /**
     * Application name — reused as the Flink job name, the Kafka consumer
     * group id (via {@link #main}) and the ods_db reader group id, so all
     * three always agree.
     */
    private static final String APP_NAME = "Dwd_13_DwdPaperExam";

    public static void main(String[] args) {
        // 2013 = local web-UI/rest port, 2 = job parallelism
        // (convention presumably set by BaseSQLApp — TODO confirm).
        new Dwd_13_DwdPaperExam().init(2013, 2, APP_NAME);
    }

    /**
     * Builds the DWD paper-exam pipeline:
     * <ol>
     *   <li>reads the ODS-layer CDC table {@code ods_db} from Kafka;</li>
     *   <li>keeps only {@code insert} events on table {@code edu.test_exam}
     *       and projects the exam-related columns;</li>
     *   <li>writes the result to the Kafka topic {@code dwd_paper_exam}.</li>
     * </ol>
     *
     * @param env  the streaming execution environment (unused directly; the
     *             job is driven through the table environment)
     * @param tEnv the table environment all SQL is registered against
     */
    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {
        // 1. Register the Kafka source for the ODS database-change stream.
        readOdsDb(tEnv, APP_NAME);

        // 2. Filter exam rows: only new (insert) records of edu.test_exam,
        //    projecting the fields the DWD layer needs plus the event ts.
        Table testExam = tEnv.sqlQuery(
                "select " +
                " `data`['id'] id, " +
                " `data`['user_id'] user_id, " +
                " `data`['paper_id'] paper_id, " +
                " `data`['score'] score, " +
                " `data`['duration_sec'] duration_sec, " +
                " ts " +
                " from ods_db " +
                " where `database`='edu' " +
                " and `table`='test_exam' " +
                " and `type`='insert' ");
        tEnv.createTemporaryView("test_exam", testExam);

        // 3. Declare the Kafka sink table and stream the filtered rows into it.
        //    Column order/types must match the projection above (ts is bigint).
        tEnv.executeSql(
                "create table dwd_paper_exam(" +
                "id string, " +
                "user_id string, " +
                "paper_id string, " +
                "score string, " +
                "duration_sec string, " +
                "ts bigint " +
                ")" + SQLUtil.getKafkaSink("dwd_paper_exam"));

        testExam.executeInsert("dwd_paper_exam");
    }
}