package com.atguigu.flink.tableapi;

import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.*;
import org.apache.kafka.clients.producer.ProducerConfig;

/**
 * Created by Smexy on 2022/12/21
 */
/**
 * Demo: write a socket-sourced WaterSensor stream to a Kafka topic using the
 * legacy table descriptor API ({@code tableEnv.connect(...)}).
 */
public class Demo6_WriteKafka
{
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: lines from a socket, parsed into WaterSensor POJOs.
        SingleOutputStreamOperator<WaterSensor> sensorStream = env
            .socketTextStream("hadoop103", 8888)
            .map(new WaterSensorMapFunction());

        // Expose the stream as a dynamic table.
        Table sensorTable = tableEnv.fromDataStream(sensorStream);

        /*
            Register a Kafka sink connector (legacy descriptor API).
         */
        Kafka kafkaDescriptor = new Kafka()
            .topic("topicD")
            .property(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092")
            // Round-robin across partitions:
            //.sinkPartitionerRoundRobin()
            // Fixed partitioner: each sink task writes to one fixed partition.
            // With fewer Kafka partitions than sink tasks, several tasks end up
            // sharing a single partition.
            .sinkPartitionerFixed()
            // Required: selects the "universal" Kafka connector version.
            .version("universal")
            ;

        // Declare the table schema. Supported data types:
        // https://nightlies.apache.org/flink/flink-docs-release-1.13/docs/dev/table/types/
        Schema sinkSchema = new Schema()
            .field("id", DataTypes.STRING())
            .field("ts", DataTypes.BIGINT())
            .field("vc", DataTypes.INT());

        // Register the Kafka-backed sink as temporary table "t2".
        tableEnv.connect(kafkaDescriptor)
                .withFormat(new Json())   // serialize rows as JSON
                .withSchema(sinkSchema)   // table structure
                .createTemporaryTable("t2");

        // Trigger the streaming insert job: sensor table -> Kafka topic.
        sensorTable.executeInsert("t2");
    }
}
