package cn.doitedu.demo.connectors;

import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Demo: reads a changelog stream from Kafka through the {@code upsert-kafka}
 * connector and prints the rows of a simple query to stdout.
 *
 * <p>The upsert-kafka connector interprets each Kafka record as an upsert (or,
 * for null values, a delete) keyed by the table's PRIMARY KEY, so the DDL below
 * must declare one ({@code primary key(url) not enforced}).
 */
public class UpsertKafka连接器读取 {
    public static void main(String[] args) {
        // DataStream execution environment: single parallelism for the demo,
        // exactly-once checkpoints every 5s, stored on the local filesystem.
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);
        streamEnv.enableCheckpointing(5000, CheckpointingMode.EXACTLY_ONCE);
        // NOTE(review): Windows-style path — assumes this demo runs on Windows.
        streamEnv.getCheckpointConfig().setCheckpointStorage("file:///d:/ckpt");

        // SQL/Table environment layered on top of the stream environment.
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // DDL mapping table t3 onto Kafka topic 'ss-3'. The message key carries
        // 'url' (key.format=json); EXCEPT_KEY means the value payload holds only
        // the non-key columns.
        String sourceDdl =
                "CREATE TABLE t3                  (\n" +
                "  url STRING,                               \n" +
                "  pv  BIGINT,                               \n" +
                "  primary key(url) not enforced             \n" +
                ") WITH (                                    \n" +
                "  'connector' = 'upsert-kafka',                    \n" +
                "  'topic' = 'ss-3',                         \n" +
                "  'properties.bootstrap.servers' = 'doitedu:9092',\n" +
                "  'properties.group.id' = 'doit44_g2',      \n" +
                "  'key.format' = 'json',                  \n" +
                "  'value.format' = 'json',                  \n" +
                "  'value.fields-include' = 'EXCEPT_KEY'     \n" +
                ")";
        tableEnv.executeSql(sourceDdl);

        // Query the upsert stream and print the resulting changelog to stdout.
        tableEnv.executeSql("select * from t3 ").print();
    }
}
