package com.atguigu.edu.app.dwd.db;

import com.atguigu.edu.util.KafkaUtil;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.runtime.state.hashmap.HashMapStateBackend;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD-layer Flink job: reads the Maxwell-format changelog topic {@code topic_db}
 * from Kafka, filters out insert events for the {@code test_exam} table
 * (excluding logically deleted rows), and writes the selected columns to the
 * Kafka sink topic {@code dwd_test_exam_paper}.
 *
 * <p>All processing is expressed in Flink SQL via the Table API; the job is
 * submitted by the {@code INSERT INTO} statement at the end of {@link #main}.
 */
public class DwdTestExamPaper {
    public static void main(String[] args) throws Exception {
        // TODO 1: initialize the streaming and table environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps local/dev runs simple; raise for production.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2: state backend / checkpointing — intentionally disabled for
        // local development; re-enable for production runs.
        /*env.enableCheckpointing(5 * 1000L, CheckpointingMode.AT_LEAST_ONCE);
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 *1000L);
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME","atguigu");
        env.setStateBackend(new HashMapStateBackend());*/

        // Optional state TTL for SQL operators (disabled; this job has no join
        // that would accumulate state).
        //Configuration configuration = tableEnv.getConfig().getConfiguration();
        //configuration.setString("table.exec.state.ttl", "5 s");

        // TODO 3: declare the Kafka source table over the raw changelog topic.
        // Column names are backtick-quoted because `database`, `table`, `type`,
        // `commit` and `old` collide with SQL keywords.
        String topicName = "topic_db";
        String groupId = "dwd_test_exam_paper";
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "  `database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `ts` bigint,\n" +
                "  `xid` bigint,\n" +
                "  `commit` string,\n" +
                "  `data` map<string,string>,\n" +
                "  `old` map<string,string> \n" +
                //"   pt AS PROCTIME() \n" +
                ") " + KafkaUtil.getKafkaDDL(topicName, groupId));

        // TODO 4: filter insert events of the `test_exam` table; skip rows
        // flagged as logically deleted (`deleted` = '0' keeps live rows only).
        Table testExamTable = tableEnv.sqlQuery("select\n" +
                "`data`['id'] id,\n" +
                "`data`['paper_id'] paper_id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['score'] score,\n" +
                "`data`['duration_sec'] duration_sec,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['submit_time'] submit_time,\n" +
                "`data`['update_time'] update_time,\n" +
                " ts\n" +
                "from topic_db\n" +
                "where `table` = 'test_exam'\n" +
                "and `type` = 'insert'\n"+
                "and `data`['deleted'] = '0'");
        tableEnv.createTemporaryView("test_exam",testExamTable);

        //testExamTable.execute().print();

        // TODO 5: `test_paper` dimension filter — kept for a future join; not
        // used by the current pipeline.
        /*Table testPaperTable = tableEnv.sqlQuery("select\n" +
                "`data`['id'] id,\n" +
                "`data`['paper_title'] paper_title,\n" +
                "`data`['course_id'] course_id,\n" +
                "`data`['create_time'] create_time,\n" +
                "`data`['update_time'] update_time,\n" +
                "`data`['publisher_id'] publisher_id,\n" +
                "`data`['deleted'] deleted\n" +
                "from topic_db\n" +
                "where `table` = 'test_paper'\n" +
                "and `type` = 'insert'\n" +
                "and `data`['deleted'] = '0'");
        tableEnv.createTemporaryView("test_paper",testPaperTable);*/

        // TODO 6: project the columns that match the sink schema (drops
        // submit_time/update_time selected above).
        Table resultTable = tableEnv.sqlQuery("select\n" +
                "te.id,\n" +
                "te.paper_id,\n" +
                "te.user_id,\n" +
                "te.score,\n" +
                "te.duration_sec,\n" +
                "te.create_time,\n" +
                "ts\n" +
                "from\n" +
                "test_exam te");
        tableEnv.createTemporaryView("result_table",resultTable);

        // TODO 8: declare the Kafka sink table and submit the streaming insert.
        tableEnv.executeSql("create table exam_paper_table(\n" +
                "id String,\n" +
                "paper_id String,\n" +
                "user_id String,\n" +
                "score String,\n" +
                "duration_sec String,\n" +
                "create_time String,\n" +
                "ts bigint\n" +
                //"PRIMARY KEY (id) NOT ENFORCED \n" +
                ")" + KafkaUtil.getKafkaSinkDDL("dwd_test_exam_paper"));
        // executeSql submits the INSERT job asynchronously; without await()
        // the client JVM could exit before the job runs (notably in local /
        // mini-cluster mode). Block until the streaming job terminates.
        TableResult insertResult =
                tableEnv.executeSql("insert into exam_paper_table select * from result_table");
        insertResult.await();
    }
}
