package com.atguigu.edu.app.dwd.db;

import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink SQL job: builds the exam-detail fact stream.
 *
 * <p>Reads the Maxwell-style change log from the Kafka topic {@code topic_db},
 * filters out insert events for {@code test_exam} and {@code test_exam_question},
 * joins them into one exam-detail record per answered question, and writes the
 * result to the Kafka topic {@code dwd_exam_detail}.
 */
public class DwdExamDetail {
    public static void main(String[] args) {

        // TODO 1 Prepare the stream / table execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2 Configure the state backend.
        // Currently only tested locally, so checkpointing / state backend is disabled.
        /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE );
        env.getCheckpointConfig().setCheckpointTimeout( 3 * 60 * 1000L );
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.setStateBackend(new HashMapStateBackend());
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
                 */

        // TODO 3 Read the raw change-log data from the Kafka topic topic_db.
        // `pt` is a processing-time attribute; `data`/`old` hold the row image as maps.
        String topicName = "topic_db";
        String groupId = "dwd_exam_detail";
        tableEnv.executeSql("create table topic_db(\n" +
                "  `database` STRING,\n" +
                "  `table` STRING,\n" +
                "  `type` STRING,\n" +
                "  `ts` bigint,\n" +
                "  `data` MAP<STRING,STRING>,\n" +
                "  `old` MAP<STRING,STRING>,\n" +
                "  `pt` as proctime()\n" +
                ")" + KafkaUtil.getKafkaDDL(topicName, groupId));

        // TODO 4 Filter out insert events for test_exam and test_exam_question.
        Table testExam = tableEnv.sqlQuery("select\n" +
                "  `data`['id'] id,\n" +
                "  `data`['paper_id'] paper_id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['score'] score,\n" +
                "  `data`['duration_sec'] duration_sec,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['submit_time'] submit_time,\n" +
                "  `data`['update_time'] update_time,\n" +
                "  `data`['deleted'] deleted,\n" +
                "  `pt`,\n" +
                "  `ts`\n" +
                "from topic_db\n" +
                "where `table` = 'test_exam'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("test_exam", testExam);

        Table testExamQuestion = tableEnv.sqlQuery("select\n" +
                "  `data`['id'] id,\n" +
                "  `data`['exam_id'] exam_id,\n" +
                "  `data`['paper_id'] paper_id,\n" +
                "  `data`['question_id'] question_id,\n" +
                "  `data`['user_id'] user_id,\n" +
                "  `data`['answer'] answer,\n" +
                "  `data`['is_correct'] is_correct,\n" +
                "  `data`['score'] score,\n" +
                "  `data`['create_time'] create_time,\n" +
                "  `data`['update_time'] update_time,\n" +
                "  `data`['deleted'] deleted,\n" +
                "  `pt`,\n" +
                "  `ts`\n" +
                "from topic_db\n" +
                "where `table` = 'test_exam_question'\n" +
                "and `type` = 'insert'");
        tableEnv.createTemporaryView("test_exam_question", testExamQuestion);

        // TODO 5 Join the two tables: one output row per question answered in an exam.
        // Both sides come from unbounded insert-only streams, so this is a regular
        // (unbounded-state) join; with checkpointing enabled a state TTL should be
        // considered to bound the join state.
        Table resultTable = tableEnv.sqlQuery("select\n" +
                "  t1.id,\n" +
                "  t1.user_id,\n" +
                "  t1.paper_id,\n" +
                "  t1.score score_exam,\n" +
                "  t2.question_id,\n" +
                "  t2.score score_question,\n" +
                "  t1.duration_sec dur_exam,\n" +
                "  t1.create_time exam_start,\n" +
                "  t2.create_time question_start,\n" +
                "  t2.is_correct,\n" +
                "  t1.pt exam_pt,\n" +
                "  t1.ts exam_ts,\n" +
                "  t2.pt question_pt,\n" +
                "  t2.ts question_ts\n" +
                "from test_exam t1\n" +
                "join test_exam_question t2\n" +
                "on t1.id = t2.exam_id \n" +
                "and t1.user_id = t2.user_id\n" +
                "and t1.paper_id = t2.paper_id");

        tableEnv.createTemporaryView("result_table", resultTable);

        //tableEnv.executeSql("select * from result_table");

        // TODO 6 Write the joined result to Kafka.
        String targetTopicName = "dwd_exam_detail";
        tableEnv.executeSql("create table kafka_exam_detail(\n" +
                "  user_id STRING,\n" +
                "  paper_id STRING,\n" +
                "  id STRING,\n" +
                "  score_exam STRING,\n" +
                "  question_id STRING,\n" +
                "  score_question STRING,\n" +
                "  dur_exam STRING,\n" +
                "  exam_start STRING,\n" +
                "  question_start STRING,\n" +
                "  is_correct STRING,\n" +
                "  exam_pt TIMESTAMP_LTZ(3),\n" +
                "  exam_ts BIGINT,\n" +
                "  question_pt TIMESTAMP_LTZ(3),\n" +
                "  question_ts BIGINT\n" +
                ")" + KafkaUtil.getKafkaSinkDDL(targetTopicName));

        // BUG FIX: INSERT INTO ... SELECT * maps columns BY POSITION, not by name.
        // result_table starts with (id, user_id, paper_id, ...) while the sink is
        // declared as (user_id, paper_id, id, ...); since all three are STRING, the
        // original "select *" silently wrote id into user_id, user_id into paper_id
        // and paper_id into id. List the columns explicitly in sink order instead.
        tableEnv.executeSql("insert into kafka_exam_detail\n" +
                "select\n" +
                "  user_id,\n" +
                "  paper_id,\n" +
                "  id,\n" +
                "  score_exam,\n" +
                "  question_id,\n" +
                "  score_question,\n" +
                "  dur_exam,\n" +
                "  exam_start,\n" +
                "  question_start,\n" +
                "  is_correct,\n" +
                "  exam_pt,\n" +
                "  exam_ts,\n" +
                "  question_pt,\n" +
                "  question_ts\n" +
                "from result_table");
    }
}
