package com.atguigu.flink.chapter11;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * @Author lizhenchao@atguigu.cn
 * @Date 2022/1/22 13:50
 */
/**
 * Demonstrates the Flink SQL Kafka connector: reads CSV records from Kafka topic
 * {@code s1}, filters them with SQL, and writes the result as JSON to topic {@code s2}.
 *
 * <p>Requires a reachable Kafka cluster at {@code hadoop162:9092,hadoop163:9092};
 * this example performs no local computation that can run without it.
 */
public class Flink06_SQL_Connector_kafka {
    public static void main(String[] args) {
        // Pin the local web UI / REST port so repeated runs are reachable at a known address.
        Configuration conf = new Configuration();
        conf.setInteger("rest.port", 20000);
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
        env.setParallelism(1);

        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // 1. DDL: declare a dynamic source table backed by the Kafka topic 's1'.
        //    Records are CSV-encoded (id,ts,vc); consumption starts at the latest offset,
        //    so only data produced after job start is seen.
        tEnv.executeSql("create table sensor(" +
                            "   id string, " +
                            "   ts bigint, " +
                            "   vc int " +
                            ")with(" +
                            "  'connector' = 'kafka',\n" +
                            "  'topic' = 's1',\n" +
                            "  'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092',\n" +
                            "  'properties.group.id' = 'Flink06_SQL_Connector_kafka',\n" +
                            "  'scan.startup.mode' = 'latest-offset',\n" +
                            "  'format' = 'csv'" +
                            ")");

        // 2. DDL: declare a dynamic sink table backed by the Kafka topic 's2'.
        //    Records are written as JSON; 'round-robin' spreads rows across the
        //    topic's partitions regardless of key.
        tEnv.executeSql("create table rs(" +
                            "   id string, " +
                            "   ts bigint, " +
                            "   vc int " +
                            ")with(" +
                            "  'connector' = 'kafka',\n" +
                            "  'topic' = 's2',\n" +
                            "  'properties.bootstrap.servers' = 'hadoop162:9092,hadoop163:9092',\n" +
                            "  'format' = 'json', " +
                            "  'sink.partitioner' = 'round-robin' " +
                            ")");

        // 3. Continuous INSERT: filter for sensor_1 and pipe matching rows into the sink.
        //    executeSql submits this streaming job asynchronously; main returning does
        //    not cancel it (call TableResult.await() if blocking is desired).
        tEnv.executeSql("insert into rs select * from sensor where id='sensor_1'");
    }
}
