package com.mai.realtime.app.dwd.db;

import com.mai.realtime.app.BaseSqlApp;
import com.mai.realtime.common.Constant;
import com.mai.realtime.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: joins exam records ({@code test_exam}) with their paper
 * ({@code test_paper}) and course ({@code course_info}) dimensions from the
 * ODS CDC topic, derives a letter grade from the score, and writes the wide
 * row to the {@code dwd_paper_exam_student} Kafka topic.
 */
public class Dwd_PaperExamStudent extends BaseSqlApp {
    public static void main(String[] args) {
        new Dwd_PaperExamStudent().init(3004, 2, "Dwd_PaperExamStudent");
    }

    /**
     * Registers a temporary view over {@code ods_db} for one upstream table,
     * keeping only bootstrap-insert rows (with a non-null id) and insert rows
     * from the {@code edu} database.
     *
     * @param tEnv       table environment the view is registered in
     * @param tableName  upstream table name; also used as the view name
     * @param projection comma-separated column expressions pulled from
     *                   {@code data[...]} (ts and pt are appended automatically)
     */
    private void registerOdsView(StreamTableEnvironment tEnv,
                                 String tableName,
                                 String projection) {
        Table table = tEnv.sqlQuery("select " +
                projection + ", " +
                "ts, " +
                "pt " +
                "from ods_db " +
                "where `database`='edu' " +
                "and `table`='" + tableName + "' " +
                "and (" +
                " (`type`='bootstrap-insert' and data['id'] is not null) or `type`='insert' " +
                ")");
        tEnv.createTemporaryView(tableName, table);
    }

    @Override
    protected void handle(StreamExecutionEnvironment env,
                          StreamTableEnvironment tEnv) {
        // 1. Read the ods_db data.
        // FIX(review): was "Dwd_Paper_question_result" — a copy-paste from the
        // sibling job; this identifier must match this app's name so it does
        // not collide with (or reuse offsets of) the other job's consumer.
        readOdsDb(tEnv, "Dwd_PaperExamStudent");

        // 2. Filter the three source tables out of the CDC stream.
        registerOdsView(tEnv, "test_exam",
                "data['id'] exam_id, " +
                "data['paper_id'] paper_id, " +
                "data['user_id'] user_id, " +
                "data['score'] score, " +
                "data['duration_sec'] duration_sec");

        registerOdsView(tEnv, "test_paper",
                "data['id'] paper_id, " +
                "data['course_id'] course_id, " +
                "data['paper_title'] paper_title");

        registerOdsView(tEnv, "course_info",
                "data['id'] course_id, " +
                "data['course_name'] course_name");

        // 3. Join exam facts with paper and course dimensions, deriving the
        //    letter grade: A for score >= 80, B for >= 60, else C.
        // NOTE(review): these are unbounded regular joins — without a state
        //    TTL (table.exec.state.ttl) the join state grows forever; confirm
        //    BaseSqlApp.init configures one.
        Table result = tEnv.sqlQuery("select " +
                " tp.paper_id, " +
                " te.user_id, " +
                " te.exam_id, " +
                " te.score, " +
                " case when te.score >= 80 then 'A'" +
                " when te.score >= 60 then 'B' " +
                " else 'C' end status, " +
                " te.duration_sec, " +
                " tp.paper_title paper_title, " +
                " tp.course_id, " +
                " ci.course_name, " +
                " te.ts " +
                " from test_paper tp " +
                " join test_exam te" +
                " on tp.paper_id = te.paper_id " +
                " join course_info ci " +
                " on tp.course_id = ci.course_id");

        tEnv.createTemporaryView("result", result);

        // 4. Declare the sink table bound to the target Kafka topic.
        tEnv.executeSql("create table dwd_paper_exam_student( " +
                "paper_id string, " +
                "user_id string, " +
                "exam_id string, " +
                "score string, " +
                "status string, " +
                "duration_sec string, " +
                "paper_title string, " +
                "course_id string, " +
                "course_name string, " +
                "ts bigint " +
                ")" + SQLUtil.getKafkaSink(Constant.TOPIC_DWD_PAPER_EXAM_STUDENT)
        );

        // 5. Write the joined rows to Kafka.
        result.executeInsert("dwd_paper_exam_student");
    }
}