package com.apps.sdses.flink141.sql;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @author jiwei
 * @description
 * @date 2023/6/12 23:13
 */
public class Datagen2UpsertKafka_WaterSensor {

    /**
     * Generates mock water-sensor readings with the {@code datagen} connector
     * and streams them into Kafka through the {@code upsert-kafka} connector.
     *
     * @param args unused
     * @throws Exception if the INSERT job fails or the await is interrupted
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: in-memory data generator.
        tableEnv.executeSql("CREATE TABLE sourceTable (\n" +
                "    id        BIGINT,\n" +     // record id (bounded sequence 1..10000)
                "    sid        STRING,\n" +    // water-level sensor id
                "    ts   TIMESTAMP(3),\n" +    // collection timestamp
                "    flag   BOOLEAN,\n" +       // water-level alarm switch
                "    vc INT\n" +                // collected water level
                ") WITH (\n" +
                "  'connector' = 'datagen', \n" +
                "  'rows-per-second' = '1', \n" +   // emit 1 row per second

                "  'fields.sid.length' = '8', \n" +   // sensor-id string length

                "  'fields.id.kind' = 'sequence', \n" +
                "  'fields.id.start' = '1', \n" +
                "  'fields.id.end' = '10000', \n" +

                "  'fields.vc.min' = '10', \n" +
                "  'fields.vc.max' = '100'\n" +
                ")");

        // Sink: upsert-kafka; the Kafka message key is derived from the PRIMARY KEY.
        // ts is declared TIMESTAMP(3) to match the source schema exactly — bare
        // TIMESTAMP defaults to precision 6 and would force an implicit cast.
        tableEnv.executeSql("CREATE TABLE sinkTable (" +
                "   id  BIGINT,\n" +
                "   sid STRING,\n" +
                "   ts  TIMESTAMP(3),\n" +
                "   flag    BOOLEAN,\n" +
                "   vc INT,\n" +
                "   PRIMARY KEY (id) NOT ENFORCED" +
                ") WITH (\n" +
                "  'connector' = 'upsert-kafka',\n" +
                "  'topic' = 'water_sensor',\n" +
                "  'properties.bootstrap.servers' = 'localhost:9092',\n" +
//                "  'properties.allow.auto.create.topics' = 'true',\n" +   // default false: whether topics may be auto-created
                "  'key.format' = 'json',\n" +   // json, csv, or avro
                "  'value.format' = 'json',\n" +   // json, csv, or avro
                "  'value.fields-include' = 'ALL',\n" +   // which columns appear in the Kafka value
                "  'sink.parallelism' = '3'\n" +
//                "  'sink.buffer-flush.max-rows' = '100',\n" +    // max buffered rows before a flush
//                "  'sink.buffer-flush.interval' = '10'\n" +    // flush interval for the buffer
                ")");

        // Submit the INSERT job and block until it finishes. Without await(),
        // main() returns immediately and the local mini-cluster is torn down,
        // killing the streaming job before it produces any data.
        tableEnv.executeSql("insert into sinkTable select * from sourceTable").await();
    }
}
