package com.atguigu.flink0624.chapter11;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// 静态导入

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/11/19 10:26
 */
public class Flink06_Sql_Kafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment streamEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        streamEnv.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(streamEnv);

        // Register a dynamic source table backed by Kafka topic "s1" (CSV records,
        // reading from the latest offset).
        tableEnv.executeSql(
            "create table sensor("
                + "   id string, "
                + "   ts bigint, "
                + "   vc int "
                + ")with("
                + "   'connector'='kafka', "
                + "   'properties.bootstrap.servers'='hadoop162:9092', "
                + "   'properties.group.id'='Flink06_Sql_Kafka',"
                + "   'topic'='s1', "
                + "   'scan.startup.mode'='latest-offset', "
                + "   'format'='csv' "
                + ")");

        // Register a dynamic sink table that writes CSV records round-robin
        // across the partitions of Kafka topic "s2".
        tableEnv.executeSql(
            "create table sensor_2("
                + "   id string, "
                + "   ts bigint, "
                + "   vc int "
                + ")with("
                + "   'connector'='kafka', "
                + "   'properties.bootstrap.servers'='hadoop162:9092', "
                + "   'topic'='s2', "
                + "   'sink.partitioner'='round-robin',  "
                + "   'format'='csv' "
                + ")");

        // Keep only rows for sensor_1 and stream them into the sink table;
        // executeInsert submits the streaming job, so no explicit execute() call is needed.
        Table filtered = tableEnv.sqlQuery("select * from sensor where id='sensor_1'");
        filtered.executeInsert("sensor_2");
    }
}
