package com.atguigu.flink.sql;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

/**
 * Created by Smexy on 2023/3/4
 *
 *  Reads lines from a socket stream, maps them to WaterSensor records,
 *  and writes the resulting Table into a Kafka topic via a SQL sink table.
 */
public class Demo5_WriteKafka
{
    public static void main(String[] args) {

        // Set up the streaming environment and its Table API bridge.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);

        // Source: socket text stream, parsed into WaterSensor POJOs.
        SingleOutputStreamOperator<WaterSensor> sensorStream =
            env.socketTextStream("hadoop103", 8888)
               .map(new WaterSensorMapFunction());

        // Expose the DataStream as a Table so it can be written with SQL.
        Table sensorTable = tEnv.fromDataStream(sensorStream);

        // DDL for the Kafka sink table: JSON-encoded records, round-robin
        // partitioning across the topic's partitions.
        String sinkDdl =
            " create table t1 ( id string,ts bigint, vc int  )"
            + " with ( "
            + " 'connector' = 'kafka' ,   "
            + " 'topic' =  'topicC'  ,"
            + " 'properties.bootstrap.servers' = 'hadoop102:9092',"
            + "  'sink.partitioner' = 'round-robin'  , "
            + "  'format' = 'json' "
            + "      )                 ";

        // Register the sink table, then submit the insert job.
        // executeInsert launches the streaming job; no env.execute() needed
        // for a pure Table API pipeline.
        tEnv.executeSql(sinkDdl);
        sensorTable.executeInsert("t1");
    }
}
