package com.atguigu.flink.chapter11;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2021/8/20 10:54
 */
/**
 * Example: connect Flink SQL to Kafka on both ends of a pipeline.
 *
 * <p>Registers a Kafka-backed source table {@code sensor} (topic {@code s1})
 * and a Kafka-backed sink table {@code result} (topic {@code s2}), then
 * submits a continuous INSERT job copying {@code (id, vc)} from source to sink.
 */
public class Flink07_SQL_Connect_Kafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // DDL for the source table: reads JSON records from Kafka topic 's1',
        // starting from the latest offsets.
        final String sourceDdl =
            "create table sensor(id string, ts bigint, vc int)with(" +
                "   'connector' = 'kafka', " +
                "   'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "   'properties.group.id' = 'Flink07_SQL_Connect_Kafka', " +
                "   'topic' = 's1', " +
                "   'format' = 'json', " +
                "   'scan.startup.mode' = 'latest-offset'" +
                ")";

        // DDL for the sink table: writes JSON records to Kafka topic 's2'
        // using the 'fixed' partitioner (each Flink task sticks to one partition).
        final String sinkDdl =
            "create table `result`(id string, vc int)with(" +
                "   'connector' = 'kafka', " +
                "   'properties.bootstrap.servers' = 'hadoop162:9092', " +
                "   'topic' = 's2', " +
                "   'format' = 'json'," +
                "   'sink.partitioner' = 'fixed'  " +
                ")";

        // 1. Register the dynamic source table backed by Kafka.
        tableEnv.executeSql(sourceDdl);

        // 2. Register the dynamic sink table backed by Kafka.
        tableEnv.executeSql(sinkDdl);

        // 3. Project the columns the sink expects and submit the streaming insert job.
        Table projection = tableEnv.sqlQuery("select id, vc from sensor");
        projection.executeInsert("`result`");
    }
}
