package org.example.flinksql.connector;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


/**
 * Minimal Flink SQL example: reads JSON messages from a Kafka topic via the
 * Kafka table connector and prints every row of a continuous {@code SELECT *}
 * query to stdout.
 *
 * <p>Expected message shape: {@code {"username":"robbin","action":"test7"}}.
 * Note that {@code execute().print()} on an unbounded Kafka source blocks and
 * streams results indefinitely; this is intentional for a demo job.
 */
public class Kafka {
    public static void main(String[] args) {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // The Blink planner has been the default since Flink 1.11 and the only
        // planner since 1.14, so the deprecated useBlinkPlanner() call (removed
        // in Flink 1.14) is no longer needed.
        EnvironmentSettings settings = EnvironmentSettings.newInstance().inStreamingMode().build();
        StreamTableEnvironment tbEnv = StreamTableEnvironment.create(env, settings);

        // Create an input table backed by the Kafka connector.
        // Kafka message example: {"username":"robbin","action":"test7"}
        tbEnv.executeSql("CREATE TABLE t_records (" +
                "username STRING," +
                "action STRING) WITH (" +
                "'connector' = 'kafka'," +
                "'topic' = 'sync_t_records'," +
                "'properties.bootstrap.servers' = 'under.azure:9092'," +
                "'properties.group.id' = 'flink-test'," +
                "'scan.startup.mode' = 'earliest-offset'," +
                "'format' = 'json'" +
                ")");

        // executeSql()/execute() already submit the job asynchronously; no
        // separate env/tbEnv execute() call is required (and TableEnvironment
        // no longer exposes one in recent Flink versions).
        tbEnv.sqlQuery("select * from t_records").execute().print();
    }
}
