package cn.doitedu.demo.catalog;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demonstrates the behavior of Flink's default {@code GenericInMemoryCatalog}:
 * all table/database metadata created through SQL DDL lives only in the JVM's
 * memory (under {@code default_catalog}) and is lost when the session ends.
 *
 * <p>The demo creates two identical Kafka-backed tables — one in
 * {@code default_database}, one in a freshly created {@code doit44} database —
 * then lists catalogs, databases, and tables to show where each object lands.
 */
public class InMemoryCatalog的特点 {

    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(2000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");
        env.setParallelism(1);

        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // A table created this way lands in default_catalog (GenericInMemoryCatalog),
        // which stores table metadata in memory only — nothing is persisted.
        tenv.executeSql(kafkaTableDdl("t1_kafka"));

        tenv.executeSql("create database doit44");

        tenv.executeSql("show catalogs").print();
        /* Expected output:
         * +-----------------+
         * |    catalog name |
         * +-----------------+
         * | default_catalog |
         * +-----------------+
         */

        tenv.executeSql("show databases").print();
        /* Expected output:
         * +------------------+
         * |    database name |
         * +------------------+
         * | default_database |
         * +------------------+
         * |           doit44 |
         * +------------------+
         */

        // Same DDL, but the qualified name places this table in the doit44 database.
        tenv.executeSql(kafkaTableDdl("doit44.t2_kafka"));

        tenv.executeSql("use default_database");
        tenv.executeSql("show tables").print();
        /* Expected output:
         * +------------+
         * | table name |
         * +------------+
         * |   t1_kafka |
         * +------------+
         */

        tenv.executeSql("use doit44");
        tenv.executeSql("show tables").print();
        /* Expected output:
         * +------------+
         * | table name |
         * +------------+
         * |   t2_kafka |
         * +------------+
         */
    }

    /**
     * Builds the Kafka-source CREATE TABLE statement shared by both demo tables.
     * The two original DDL strings were byte-identical except for the table name,
     * so only that varies here.
     *
     * @param tableName plain or database-qualified table name (e.g. {@code doit44.t2_kafka})
     * @return the full CREATE TABLE DDL for a JSON-encoded Kafka topic source
     */
    private static String kafkaTableDdl(String tableName) {
        return "CREATE TABLE " + tableName + "            (\n" +
                "  uid BIGINT                                 \n" +
                "  ,event_id STRING                           \n" +
                "  ,action_time BIGINT                        \n" +
                "  ,rt1 as to_timestamp_ltz(action_time,3)    \n" +
                "  ,watermark for rt1 as rt1                  \n" +
                ") WITH (                                    \n" +
                "  'connector' = 'kafka',                    \n" +
                "  'topic' = 'a-1',                         \n" +
                "  'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "  'properties.group.id' = 'doit44_g1',      \n" +
                "  'scan.startup.mode' = 'latest-offset',    \n" +
                "  'value.format' = 'json',                  \n" +
                "  'value.fields-include' = 'EXCEPT_KEY'     \n" +
                ")";
    }
}
