package com.edu.yx.app.App_03_DwdApp.Dwd_05_Exam;

import com.edu.yx.app.APP_01_BaseApp.BaseSqlApp;
import com.edu.yx.util.SQLUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

import static com.edu.yx.common.Constant.*;

/**
 * DWD job: builds the paper-granularity exam detail table
 * ({@code dwd_exam_paper_detail}) by filtering inserted {@code test_exam} rows
 * from the ODS Kafka topic and lookup-joining the {@code test_paper} and
 * {@code course_info} dimension tables, then writing the result back to Kafka.
 */
public class Dwd_01_Exam_PaperDetail extends BaseSqlApp {

    public static void main(String[] args) {
        // init(port, parallelism, appName) — appName doubles as the job/consumer identity.
        new Dwd_01_Exam_PaperDetail().init(
                3051,
                2,
                "Dwd_01_Exam_PaperDetail"
        );
    }

    @Override
    protected void handle(StreamExecutionEnvironment env, StreamTableEnvironment tEnv) {

        // Three downstream requirements share this single DWD table:
        // TODO 1. The finest granularity of an exam is one paper (paper-level grain).
        // TODO 2. One course may have several different exams — similar to one
        //         order_id containing several different sku_ids.
        // TODO 3. At paper granularity, append a CASE expression that buckets the
        //         score into a range flag.

        // 1. Register the dimension (lookup) tables: test_paper and course_info.
        readTestPaper(tEnv);

        readCourseInfo(tEnv);


        // 2. Register the ODS Kafka source table (ods_db).
        readOdsDb(tEnv,"Dwd_01_Exam_PaperDetail");

        // 3. Build dwd_exam_paper_detail.

        // 3.1 Filter ods_db down to inserted test_exam rows -> paper-grain base view.
        //     NOTE: score is cast to BIGINT here, once, so the CASE comparisons in 3.2
        //     operate on a numeric column instead of relying on implicit
        //     string-to-number conversion (SELECT-list aliases are not visible inside
        //     the same SELECT, so a downstream alias would not have helped).
        Table paper_detail_table = tEnv.sqlQuery("select " +
                "cast(data['paper_id'] as bigint) id," +
                "data['user_id'] user_id," +
                "cast(data['score'] as bigint) score," +
                "data['duration_sec'] duration_sec," +
                "data['create_time'] create_time," +
                "`type`, " +
                "`old`," +
                " ts ," +
                " pt " +
                " from ods_db " +
                " where `database` = 'edu' " +
                " and `table` = 'test_exam' " +
                " and `type` = 'insert' "
        );
        tEnv.createTemporaryView("paper_detail_table",paper_detail_table);



        // 3.2 Lookup-join the dimensions (FOR SYSTEM_TIME AS OF pd.pt) and bucket the
        //     score into a range flag.
        /**
         * Flink CASE syntax:
         * CASE WHEN condition1 THEN result1 (WHEN condition2 THEN result2)* (ELSE result_z) END
         *
         * NOTE(review): a score greater than 100 matches no WHEN branch, so flag is
         * NULL for such rows — confirm this is the intended handling of bad data.
         */
        Table result = tEnv.sqlQuery("select  " +
                " cast(pd.id as string) id," +
                " tp.paper_title," +
                " cast(tp.course_id as string) course_id," +
                " ci.course_name," +
                " cast(user_id as string) user_id," +
                " score ," +  // already BIGINT (cast in 3.1); used in arithmetic downstream
                " case " +
                "   when score >=90 and score <=100 then '90-100' " +
                "   when score >=80 and score < 90 then '80-90' " +
                "   when score >=70 and score < 80 then '70-80' " +
                "   when score >=60 and score < 70 then '60-70' " +
                "   when score < 60 then 'less_than_60' " +
                " end flag," +
                " cast(duration_sec as bigint) duration_sec," +
                " date_format(pd.create_time,'yyyy-MM-dd') date_id," +
                " pd.`type`," +
                " pd.`old`," +
                " pd.ts," +
                " current_row_timestamp() as row_op_ts " +
                " from paper_detail_table pd " +
                " join test_paper for system_time as of pd.pt as tp on pd.id = tp.id " +
                " join course_info for system_time as of pd.pt as ci on tp.course_id = ci.id "
        );

        // 4. Declare the Kafka sink table.
        tEnv.executeSql("create table dwd_exam_paper_detail(" +
                " id string," +
                " paper_title string," +
                " course_id string," +
                " course_name string," +
                " user_id string," +
                " score bigint," + // numeric because downstream jobs aggregate on it
                "flag string," +
                "duration_sec bigint," +
                "date_id string," +
                "`type` string, " +
                "`old` MAP<STRING, STRING>, " +
                "ts bigint," +
                "row_op_ts timestamp_ltz(3)" +
                ")" + SQLUtil.getKafkaSink(TOPIC_DWD_EXAM_PAPER_DETAIL)
        );

        // 5. Stream the joined result into the sink.
        result.executeInsert("dwd_exam_paper_detail");
    }
}
