package flinkSql.sink;

import bean.SensorReading;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.Csv;
import org.apache.flink.table.descriptors.Kafka;
import org.apache.flink.table.descriptors.Schema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.junit.Test;

import static org.apache.flink.table.api.Expressions.$;

// TODO: write streaming data into Kafka in real time
public class Flink_Sink_Kafka {
    /**
     * Reads CSV sensor readings ("id,timestamp,temperature") from a socket,
     * projects (id, temp) via both the Table DSL and plain SQL, and writes the
     * result into a Kafka topic registered through the connector descriptor API.
     *
     * @throws Exception if job submission or execution fails
     */
    @Test
    public void test() throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        StreamTableEnvironment tableEnvironment = StreamTableEnvironment.create(env);

        // Source: one comma-separated SensorReading per socket line.
        SingleOutputStreamOperator<SensorReading> input = env.socketTextStream("node193", 9999)
                .map((MapFunction<String, SensorReading>) value -> {
                    String[] split = value.split(",");
                    return new SensorReading(split[0], Long.parseLong(split[1]), Double.parseDouble(split[2]));
                });

        // DSL style
        Table table = tableEnvironment.fromDataStream(input);
        Table tableResult = table.select($("id"), $("temp"));

        // SQL style
        tableEnvironment.createTemporaryView("sensor", input);
        Table sqlResult1 = tableEnvironment.sqlQuery("select id,temp from sensor");

        // Kafka connector sink.
        // NOTE(review): the connect()/descriptor API is deprecated in newer
        // Flink releases — prefer tableEnvironment.executeSql() with a
        // CREATE TABLE ... WITH ('connector' = 'kafka', ...) DDL statement.
        tableEnvironment.connect(new Kafka()
                .version("universal")
                .topic("flink")
                .startFromLatest()
                .sinkPartitionerRoundRobin()
                .property(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.1.190:9092,192.168.1.192:9092,192.168.1.193:9092"))
                .withSchema(new Schema()
                        .field("id", DataTypes.STRING())
                        .field("temp", DataTypes.DOUBLE()))
                .withFormat(new Csv())
                .createTemporaryTable("kafka");

        // executeInsert() translates and submits its own Table API job
        // asynchronously; no further env.execute() is needed (or allowed).
        tableResult.executeInsert("kafka");

        // BUG FIX: the original code called env.execute() after the inserts.
        // Since no sink operator was ever attached to the DataStream topology
        // (the stream only feeds Table API pipelines), env.execute() threw
        // "IllegalStateException: No operators defined in streaming topology".
        // Instead, block on the second insert's TableResult so the JVM keeps
        // running while the unbounded streaming jobs are active — mirroring
        // the blocking behavior env.execute() was intended to provide.
        sqlResult1.executeInsert("kafka").await();
    }

}
