package com.atguigu.edu.realtime.app.dwd.db;

import com.atguigu.edu.realtime.util.KafkaUtil;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * DWD job: extracts {@code test_exam} change-log records from the raw Kafka
 * topic {@code topic_db} and writes them to the DWD Kafka topic
 * {@code dwd_text_exam} via Flink SQL.
 *
 * <p>Pipeline: Kafka source (Maxwell-style changelog rows with a
 * {@code map<string,string>} payload) → filter/project → Kafka sink.
 * Execution is triggered by the final {@code INSERT INTO}; no
 * {@code env.execute()} is needed for a pure Table API insert.
 */
public class DwdTestExam {
    public static void main(String[] args) {
        // TODO 1. Set up the streaming and table execution environments.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // TODO 2. Checkpointing and state backend — intentionally disabled
        // (e.g. for local development); re-enable for production.
       /*
        env.enableCheckpointing(5 * 60 * 1000L, CheckpointingMode.EXACTLY_ONCE);
        // Checkpoint timeout
        env.getCheckpointConfig().setCheckpointTimeout(3 * 60 * 1000L);
        // Maximum number of concurrent checkpoints
        env.getCheckpointConfig().setMaxConcurrentCheckpoints(2);
        // Checkpoint storage location
        env.getCheckpointConfig().setCheckpointStorage("hdfs://hadoop102:8020/gmall/ck");
        System.setProperty("HADOOP_USER_NAME", "atguigu");
        // Enable the state backend
        env.setStateBackend(new HashMapStateBackend());*/

        // TODO 3. Read the raw changelog stream from Kafka.
        String topicName = "topic_db";
        // FIX: the group id was "dwd_trade_order_detail", copy-pasted from
        // another job. Two jobs sharing one consumer group would compete for
        // partitions and corrupt each other's offsets; use an id unique to
        // this job.
        String groupId = "dwd_test_exam";
        tableEnv.executeSql("CREATE TABLE topic_db (\n" +
                "`database` string,\n" +
                "  `table` string,\n" +
                "  `type` string,\n" +
                "  `ts` bigint,\n" +
                "  `xid` string,\n" +
                "  `commit` string,\n" +
                "  `data` map<string,string>,\n" +
                "  `old` map<string,string>,\n" +
                // Processing-time attribute, required by the sink schema below.
                "   pt AS PROCTIME() \n" +
                ")" + KafkaUtil.getKafkaDDL(topicName, groupId));

        // FIX: alias every projected expression — without aliases Flink names
        // them EXPR$0, EXPR$1, ... and the downstream insert only works by
        // column position. Explicit names make the view schema readable.
        // NOTE(review): there is no filter on `database` or `type`, so rows
        // from any database and all operation types (insert/update/delete)
        // pass through — confirm this is intended.
        Table table = tableEnv.sqlQuery("select\n " +
                "`data`['id'] id,\n" +
                "`data`['user_id'] user_id,\n" +
                "`data`['paper_id'] paper_id,\n" +
                "cast(`data`['score'] as bigint) score,\n" +
                "cast(`data`['duration_sec'] as bigint) duration_sec,\n" +
                "`data`['submit_time'] submit_time,\n" +
                // NOTE(review): if submit_time is a datetime string
                // ("yyyy-MM-dd HH:mm:ss") this cast yields NULL, not an epoch
                // value — verify the upstream format before relying on ts.
                "cast(`data`['submit_time'] as bigint) ts,\n" +
                "pt\n" +
                " from topic_db\n" +
                "where `table`='test_exam'\n" +
                "and `data`['submit_time'] is not null");

        table.printSchema();
        tableEnv.createTemporaryView("test_exam", table);

        // FIX: the sink table was named "text_exam" — a near-collision typo of
        // the source view "test_exam". Renamed to an unambiguous internal name.
        // NOTE(review): the topic name "dwd_text_exam" looks like the same
        // typo, but it is kept byte-for-byte so downstream consumers keep
        // working — confirm before renaming the topic itself.
        tableEnv.executeSql("CREATE TABLE dwd_test_exam (\n" +
                "`id` String,\n" +
                "`user_id` String,\n" +
                "`paper_id` String,\n" +
                "`score` bigint,\n" +
                "`duration_sec` bigint,\n" +
                "`submit_time` String,\n" +
                "ts bigint," +
                "pt TIMESTAMP_LTZ(3) NOT NULL)" + KafkaUtil.getKafkaSinkDDL("dwd_text_exam"));

        // Columns match the sink schema by position; this submits the job.
        tableEnv.executeSql("insert into dwd_test_exam select * from test_exam");

    }

}
