package com.atguigu.flink.chapter10;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/*
Common connector read/write demo ------- Kafka
 */
/**
 * Demonstrates reading from and writing to Kafka (and the print connector)
 * with the Flink Table API, using SQL DDL to declare the tables.
 *
 * <p>Pipeline: Kafka topic {@code s1} (csv) → filter {@code vc > 10} →
 * two print sinks ({@code a}, {@code b}) and a Kafka sink (topic {@code s2})
 * with exactly-once semantics.
 */
public class KafkaConnectorDemo {
    public static void main(String[] args) {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // Create the Kafka source table via DDL.
        // NOTE: the WITH clause must not have a trailing comma before ')' —
        // Flink's SQL parser rejects it with a parse error.
        tEnv.executeSql("create table sensor(" +
                "id string," +
                "ts bigint," +
                "vc int" +
                ") with(" +
                " 'connector' = 'kafka', " +
                " 'topic' = 's1', " +
                " 'properties.bootstrap.servers' = 'hadoop102:9092'," +
                " 'properties.group.id' = 'atguigu'," +
                " 'scan.startup.mode' = 'latest-offset', " +
                " 'format' = 'csv' " +
                ")");
        Table result = tEnv.sqlQuery("select * from sensor where vc > 10");

        // Two print sinks with the same schema as the filtered result.
        tEnv.executeSql("create table a(" +
                "id string," +
                "ts bigint," +
                "vc int" +
                ") with(" +
                " 'connector' = 'print' " +
                ")");
        tEnv.executeSql("create table b(" +
                "id string," +
                "ts bigint," +
                "vc int" +
                ") with(" +
                " 'connector' = 'print' " +
                ")");
        // NOTE(review): each executeInsert submits a separate Flink job;
        // to run all inserts in a single job, use tEnv.createStatementSet().
        result.executeInsert("a");
        result.executeInsert("b");

        // Kafka sink with exactly-once delivery. The transaction timeout must
        // not exceed the broker's transaction.max.timeout.ms (15 min default).
        tEnv.executeSql("create table s2(" +
                "a string," +
                "b bigint," +
                "c int" +
                ") with(" +
                " 'connector' = 'kafka', " +
                " 'topic' = 's2', " +
                " 'properties.bootstrap.servers' = 'hadoop102:9092', " +
                " 'format' = 'csv', " +
                " 'sink.semantic' = 'exactly-once'," +     // exactly-once delivery guarantee
                " 'sink.transactional-id-prefix' = 'atguigu'," +
                " 'properties.transaction.timeout.ms' = '90000' " +
                ")");

        result.executeInsert("s2");
    }
}

