package com.atguigu.flink.chapter02_DataStreamAPI.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

/**
 * Created by Smexy on 2022/10/24
 *
 *      ProducerRecord&lt;key(null), value(String)&gt;
 */
public class Demo20_KafkaSink
{
    /**
     * Reads comma-separated sensor readings ("id,ts,vc") from a socket on hadoop103:8888,
     * converts each line into a {@link WaterSensor} serialized as a JSON string, and writes
     * the result to the Kafka topic {@code topicC} on hadoop103:9092.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // IN is the type written as the Kafka record value.
        // ProducerRecord<KEY (usually unset; only needed to control partitioning — String works),
        //                VALUE (the payload, here a JSON string)>
        /*
            FlinkKafkaProducer(
                String brokerList, String topicId, SerializationSchema<IN> serializationSchema)
         */
        // Producer used by the Flink job; the record key is left null, the value is the JSON string.
        FlinkKafkaProducer<String> flinkKafkaProducer =
            new FlinkKafkaProducer<>("hadoop103:9092", "topicC", new SimpleStringSchema());

        env
           .socketTextStream("hadoop103", 8888)
           .map(new MapFunction<String, String>()
           {
               @Override
               public String map(String value) throws Exception {
                   // Line format: "id,ts,vc" -> WaterSensor(id, Long ts, Integer vc) as JSON.
                   String[] data = value.split(",");
                   return JSON.toJSONString(new WaterSensor(
                       data[0],
                       Long.valueOf(data[1]),
                       Integer.valueOf(data[2]))
                   );
               }
           })
           .addSink(flinkKafkaProducer);

        // Propagate execution failures instead of swallowing them: catching the exception and
        // calling printStackTrace() let main() return normally even when the job failed.
        env.execute();
    }
}
