package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by Smexy on 2023/3/4
 *
 * Demonstrates writing an aggregated (updating) query result to Kafka via the
 * upsert-kafka connector. A plain kafka connector cannot accept updates, and
 * Flink does not support ENFORCED mode for the PRIMARY KEY constraint, so the
 * key must be declared NOT ENFORCED.
 */
public class Demo7_WriteKafkaAgg2
{
    /**
     * Reads WaterSensor records from a socket, aggregates them with SQL, and
     * writes the updating result to Kafka through the upsert-kafka connector.
     *
     * @param args optional overrides: args[0] = socket host (default "hadoop103"),
     *             args[1] = socket port (default 8888). Defaults preserve the
     *             original hard-coded behavior.
     */
    public static void main(String[] args) {

        // Allow host/port to be supplied on the command line; fall back to the
        // original demo values so existing invocations keep working.
        String host = args.length > 0 ? args[0] : "hadoop103";
        int port = args.length > 1 ? Integer.parseInt(args[1]) : 8888;

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        SingleOutputStreamOperator<WaterSensor> ds = env
            .socketTextStream(host, port)
            .map(new WaterSensorMapFunction());

        // Register the stream under a name so SQL can reference it.
        tableEnvironment.createTemporaryView("source", ds);

        // Sink table mapped to a Kafka topic. The upsert-kafka connector requires
        // a PRIMARY KEY (used as the Kafka record key); Flink only supports
        // NOT ENFORCED for key constraints.
        String createTableSql = " create table t1 ( id string   , vc int , maxTs bigint  ," +
                                "  PRIMARY KEY (id,vc)  NOT ENFORCED )" +
                                " with ( " +
                                 " 'connector' = 'upsert-kafka' ,   " +
                                 " 'topic' =  'topicE'  ," +
                                 " 'properties.bootstrap.servers' = 'hadoop102:9092'," +
                                 "  'key.format' = 'json' ," +
                                 "  'value.format' = 'json' " +
                                 "      )                 ";

        // Execute the DDL to register the sink table.
        tableEnvironment.executeSql(createTableSql);

        // The query is an aggregation, so the result table is continuously updated.
        // A plain kafka connector cannot handle updates; upsert-kafka is required.
        // Alias max(ts) as maxTs so the select list maps explicitly (not just
        // positionally) onto the sink schema.
        tableEnvironment.executeSql(
            " insert into  t1 select id, vc, max(ts) as maxTs from source group by id, vc");
    }
}
