package com.atguigu.realtime.app.dwd.db;

import com.atguigu.realtime.utils.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer job: reads raw CDC change records from the Kafka topic
 * {@code topic_db}, keeps only newly inserted {@code test_exam} rows,
 * derives a textual {@code score_level} bucket from the exam score, and
 * writes the flattened result to the Kafka topic {@code dwd_test_exam}.
 */
public class DwdTestExam {
    public static void main(String[] args) {
        // TODO 1 Environment setup: single-parallelism streaming env + Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Checkpointing / state backend — intentionally disabled (e.g. for local runs).
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
         */

        // TODO 3 Expose the raw CDC stream as a dynamic table. The payload columns
        // arrive as a string-to-string map; `pt` is a processing-time attribute.
        String topicName = "topic_db";
        String groupID = "dwd_test_exam";
        tableEnv.executeSql("create table topic_db(\n" +
                " `database` string,\n" +
                " `table` string,\n" +
                " `type` string,\n" +
                " `data` map<string,string>,\n" +
                " `ts` bigint,\n" +
                " `pt` AS PROCTIME()\n" +
                ")"+ KafkaUtil.getKafkaDDL(topicName,groupID));

        // Keep only freshly inserted test_exam rows and derive the score bucket.
        // `data` values are already strings (map<string,string>), so no cast is
        // needed in the projection. The score, however, is cast to int explicitly
        // so the bucket boundaries compare numerically instead of relying on
        // implicit string-to-number coercion. Scores below 0 (or non-numeric)
        // yield a NULL score_level, same as the original CASE with no ELSE.
        Table testExam = tableEnv.sqlQuery("select" +
                " data['id'] id," +
                " data['paper_id'] paper_id," +
                " data['user_id'] user_id," +
                " data['score'] score," +
                " data['duration_sec'] duration_sec," +
                " date_format(data['create_time'], 'yyyy-MM-dd') date_id," +
                " data['create_time'] create_time," +
                " case when cast(data['score'] as int)>=0 and cast(data['score'] as int)<60 then '不及格'" +
                "      when cast(data['score'] as int)>=60 and cast(data['score'] as int)<70 then '及格'" +
                "      when cast(data['score'] as int)>=70 and cast(data['score'] as int)<80 then '良好'" +
                "      when cast(data['score'] as int)>=80 then '优秀' end score_level" +
                " from topic_db" +
                " where `table`='test_exam' " +
                " and `type`='insert' ");
        tableEnv.createTemporaryView("test_exam",testExam);

        //testExam.execute().print();
        // TODO 4 Declare the Kafka sink table dwd_test_exam (schema mirrors the view above).
        tableEnv.executeSql("create table dwd_test_exam( " +
                "    `id` String, " +
                "    `paper_id` String, " +
                "    `user_id` String, " +
                "    `score` String, " +
                "    `duration_sec` String, " +
                "    `date_id` String, " +
                "    `create_time` String, " +
                "    `score_level` string " +
                ") "+ KafkaUtil.getKafkaSinkDDL("dwd_test_exam"));

        // TODO 5 Write the derived rows to the sink. executeSql on an INSERT
        // submits the job itself, so no explicit env.execute() call is required.
        tableEnv.executeSql("insert into dwd_test_exam select * from test_exam");

    }
}
