package cn.doitedu.cn.doitedu.rtdw.ddl;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.catalog.hive.HiveCatalog;

/**
 * DDL bootstrap job: registers a Hive catalog, creates the {@code doit40_rtdw}
 * database, and (re)creates the {@code dwd_kafka} table backed by the Kafka
 * topic {@code dwd-events-detail} with JSON value format.
 *
 * <p>Run as a plain Flink application; all work happens through
 * {@link StreamTableEnvironment#executeSql(String)} side effects — there is no
 * streaming pipeline to submit beyond the DDL statements.
 */
public class DwdEvents {
    public static void main(String[] args) {

        // Streaming environment with exactly-once checkpointing every 5s.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // NOTE(review): Windows-local checkpoint path — fine for dev, not for cluster deployment.
        env.getCheckpointConfig().setCheckpointStorage("file:/d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Hive catalog; hive-site.xml is expected under rtdw/src/main/resources/.
        // null => use the default database configured in hive-site.xml.
        HiveCatalog hiveCatalog = new HiveCatalog("doit40-hive", null, "rtdw/src/main/resources/");

        // Register the catalog with the table environment and switch to it.
        tenv.registerCatalog("doit40-hive", hiveCatalog);
        tenv.useCatalog("doit40-hive");

        // Create the target database inside the Hive catalog (idempotent) and switch to it.
        tenv.executeSql("create database if not exists doit40_rtdw");
        tenv.useDatabase("doit40_rtdw");

        // Recreate the Kafka-backed DWD table.
        tenv.executeSql("drop table if exists dwd_kafka");
        tenv.executeSql(dwdKafkaDdl());

        tenv.executeSql("show tables").print();
    }

    /**
     * Builds the {@code CREATE TABLE} DDL for {@code dwd_kafka}.
     *
     * <p>Columns: raw event fields, joined user-profile fields, GPS-derived
     * geo fields and page-dimension fields; plus a processing-time column
     * {@code pt}, an event-time column {@code rt} derived from the epoch-millis
     * {@code event_time}, and a zero-lag watermark on {@code rt}.
     */
    private static String dwdKafkaDdl() {
        return
                "  CREATE TABLE dwd_kafka(                           "
                        +"     username          string,                     "
                        +"     session_id        string,                     "
                        +"     event_id          string,                     "
                        +"     event_time        bigint,                     "
                        +"     lat               double,                     "
                        +"     lng               double,                     "
                        +"     release_channel   string,                     "
                        +"     device_type       string,                     "
                        +"     properties        map<string,string>,         "

                        +"     user_id           BIGINT,                     "
                        +"     register_phone    STRING,                     "
                        +"     user_status       INT,                        "
                        +"     register_time     TIMESTAMP(3),               "
                        +"     register_gender   INT,                        "
                        +"     register_birthday DATE,                       "
                        +"     register_city        STRING,                  "
                        +"     register_job         STRING,                  "
                        +"     register_source_type INT,                     "
                        +"     member_level_id   INT,                        "

                        +"     gps_province STRING,                          "
                        +"     gps_city     STRING,                          "
                        +"     gps_region   STRING,                          "

                        +"     page_url     STRING,                          "
                        +"     url_prefix    STRING,                         "
                        +"     page_type    STRING,                          "
                        +"     page_service STRING ,                         "
                        +"     pt as proctime(),                             "
                        +"     rt as  to_timestamp_ltz(event_time,3) ,       "
                        +"     watermark for rt as rt - interval '0' second  "
                        +" ) WITH (                                          "
                        +"  'connector' = 'kafka',                           "
                        +"  'topic' = 'dwd-events-detail',                   "
                        +"  'properties.bootstrap.servers' = 'doitedu:9092', "
                        +"  'properties.group.id' = 'testGroup',             "
                        +"  'scan.startup.mode' = 'latest-offset',         "
                        +"  'value.format'='json',                           "
                        +"  'value.json.fail-on-missing-field'='false',      "
                        // BUG FIX: was 'EXCEPT_KEY', which the Kafka connector rejects
                        // with a ValidationException unless a key format / 'key.fields'
                        // is also declared. 'ALL' is the correct (default) choice here
                        // since no key format is configured.
                        +"  'value.fields-include' = 'ALL')                  ";
    }

}
