package cn.doitedu.demo.base;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Flink SQL demo job: maps a Kafka topic to a source table, filters
 * {@code page_load} events whose {@code ref} property is {@code /x},
 * and writes the result to a second Kafka topic via an INSERT INTO.
 */
public class Demo1_sql {
    public static void main(String[] args) {

        // Build the DataStream execution environment with exactly-once
        // checkpointing every 5 seconds, persisted to a local directory.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        env.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // Build the Table/SQL environment on top of the stream environment.
        StreamTableEnvironment tenv = StreamTableEnvironment.create(env);

        // Map the Kafka source topic to a Flink logical table.
        // uid2 / qiguai / event_id2 are computed columns derived per row.
        // NOTE(review): 'value.fields-include' = 'EXCEPT_KEY' normally pairs
        // with declared key fields ('key.format' / 'key.fields'), which this
        // DDL does not define — confirm against the Kafka connector docs.
        tenv.executeSql("CREATE TABLE my_source_table (\n" +
                "  uid BIGINT,                               \n" +
                "  event_id STRING,                          \n" +
                "  properties MAP<STRING,STRING>,            \n" +
                "  action_time BIGINT,                       \n" +
                "  uid2 as uid+10,                           \n" +
                "  qiguai as 'hahaha',                       \n" +
                "  event_id2 as upper(event_id)              \n" +
                ") WITH (                                    \n" +
                "  'connector' = 'kafka',                    \n" +
                "  'topic' = 'ss-1',                         \n" +
                "  'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "  'properties.group.id' = 'doit44_g1',      \n" +
                "  'scan.startup.mode' = 'latest-offset',    \n" +
                "  'value.format' = 'json',                  \n" +
                "  'value.fields-include' = 'EXCEPT_KEY'     \n" +
                ")");

        // Map the Kafka target topic to a Flink logical table (the sink).
        // NOTE(review): 'scan.startup.mode' and 'properties.group.id' are
        // source-side (scan) options and have no effect when this table is
        // only written to — kept here for parity with the source DDL.
        tenv.executeSql("CREATE TABLE my_target_table (\n" +
                "  uid BIGINT,\n" +
                "  event_id STRING,\n" +
                "  properties MAP<STRING,STRING>,\n" +
                "  action_time BIGINT\n" +
                ") WITH (\n" +
                "  'connector' = 'kafka',\n" +
                "  'topic' = 'ss-2',\n" +
                "  'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "  'properties.group.id' = 'doit44_g1',\n" +
                "  'scan.startup.mode' = 'latest-offset',\n" +
                "  'value.format' = 'json',\n" +
                "  'value.fields-include' = 'EXCEPT_KEY' \n" +
                ")");

        // The pipeline: INSERT INTO <sink> SELECT ... FROM <source>.
        // Fix: the original code called print() on a debug SELECT and then
        // System.exit(1) right before this point, so the sink DDL and this
        // INSERT never executed; the debug exit has been removed.
        tenv.executeSql(
                        "insert into  my_target_table \n"+
                        " select                      \n"+
                        "   uid,                      \n"+
                        " 	event_id,                 \n"+
                        " 	properties,               \n"+
                        " 	action_time               \n"+
                        " from my_source_table        \n"+
                        " where event_id='page_load'  \n"+
                        " and properties['ref']='/x'  \n"
        );

    }
}
