package com.atguigu.chapter11;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Author: Pepsi
 * Date: 2023/8/24
 * Desc: Flink SQL demo — reads CSV sensor records from a Kafka topic,
 *       aggregates sum(vc) per sensor id as an updating (changelog) result,
 *       and writes it to an upsert-kafka sink keyed by id.
 */
public class Flink09_SQL_Kfaka_Update {
    // NOTE(review): "Kfaka" is a typo for "Kafka" in the class name; kept as-is
    // so the file name and any external references continue to compile.

    /** Source table DDL: plain Kafka connector consuming CSV records. */
    private static final String SOURCE_TABLE_DDL =
            "create table sensor("
                    + " id string, "
                    + " ts bigint, "
                    + " vc int "
                    + ")with("
                    + "'connector' = 'kafka',"
                    + "'topic' = 'flink_source_kafka',"
                    + "'properties.bootstrap.servers' = 'hadoop101:9092',"
                    + "'properties.group.id' = 'flink_consumer_group',"
                    + "'scan.startup.mode' = 'latest-offset',"
                    + "'format' = 'csv'"
                    + ")";

    /**
     * Sink table DDL: upsert-kafka connector. The (non-enforced) primary key
     * routes rows with the same id to one partition and becomes the Kafka
     * record key, so later rows for an id are treated as updates.
     */
    private static final String SINK_TABLE_DDL =
            "create table abc("
                    + " id string, "
                    + " vc int ,"
                    + "primary key(id) not enforced "
                    + ")with("
                    + "'connector' = 'upsert-kafka',"
                    + "'topic' = 'flink_sink_kafka',"
                    + "'properties.bootstrap.servers' = 'hadoop101:9092',"
                    + "'key.format' = 'json',"
                    + "'value.format' = 'json'"
                    + ")";

    /**
     * Entry point: builds the table environment, registers source and sink
     * tables, and streams the per-id sum of vc into the upsert-kafka sink.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        tableEnv.executeSql(SOURCE_TABLE_DDL);

        // Continuous aggregation: produces an updating result per sensor id.
        Table aggregated = tableEnv.sqlQuery("select id,sum(vc) vc from sensor group by id");

        tableEnv.executeSql(SINK_TABLE_DDL);

        // Inserting into the sink triggers job submission; no env.execute() needed.
        aggregated.executeInsert("abc");
    }
}
