package cn.doitedu.demo.base;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demo: the different kinds of columns a Flink SQL table definition can declare
 * (physical, computed/expression, metadata, processing-time, event-time with watermark),
 * illustrated by mapping a Kafka topic to a Flink SQL table and printing its schema.
 */
public class 各类字段定义演示 {
    public static void main(String[] args) {

        // Build the DataStream API execution environment with exactly-once checkpointing
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        streamEnv.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // Build the SQL (Table API) environment on top of the stream environment
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // DDL that maps a Kafka topic to a Flink SQL table; it demonstrates
        // every flavor of column a table definition can carry.
        String ddl =
                " CREATE TABLE KafkaTable (                           "
              + "   uid BIGINT,                                      "    // physical column
              + "   event_id STRING,                                 "    // physical column
              + "   properties MAP<STRING,STRING>,                   "    // physical column
              + "   action_time BIGINT,                              "    // physical column

              + "   uid2 as uid+10,                                  "    // computed (expression) column
              + "   eid2 as upper(event_id),                         "    // computed (expression) column
              + "   action_time2 as to_timestamp_ltz(action_time,3), "    // computed (expression) column

              + "   partition_id int    metadata from 'partition',   "    // metadata column, backed by the connector-provided metadata key: partition
              + "   `offset`      bigint metadata,                   "    // metadata column; when the column name equals the metadata key, FROM can be omitted

              // proctime() yields an expression column carrying "processing time" semantics
              + "   pt  as proctime(),                               "

              // declaring a watermark on rt makes rt a column with "event time" semantics
              + "   rt  as  to_timestamp_ltz(action_time,3),         "
              + "   watermark for rt  as  rt - interval '0' second   "

              + " ) WITH (                                           "
              + "   'connector' = 'kafka',                           "
              + "   'topic' = 'ss-1',                                "
              + "   'properties.bootstrap.servers' = 'doitedu:9092', "
              + "   'properties.group.id' = 'doit44_g1',             "
              + "   'scan.startup.mode' = 'latest-offset',           "
              + "   'value.format' = 'json',                         "
              + "   'value.fields-include' = 'EXCEPT_KEY'            "
              + " )                                                  ";
        tableEnv.executeSql(ddl);

        // Print the resolved schema of the mapped table
        tableEnv.executeSql("desc KafkaTable").print();
    }
}
