package com.atguigu.flink.datastramapi.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

/**
 * Created by Smexy on 2023/2/27
 *
 * Writes a stream to Kafka; the Flink program acts as the Kafka producer by
 * constructing a {@code FlinkKafkaProducer} sink.
 *
 * addSink(FlinkKafkaProducer): FlinkKafkaProducer is the legacy connector API,
 * but it is still usable.
 */
public class Demo3_KafkaSink
{
    /**
     * Reads lines from a socket, maps them to {@code WaterSensor} POJOs,
     * serializes each POJO to a JSON string, and writes the strings to the
     * Kafka topic {@code topicB}.
     *
     * @param args unused
     * @throws Exception if the Flink job fails to submit or execute; the
     *         failure is propagated so the process exits with a non-zero
     *         status instead of silently swallowing the error
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Parallelism 1 keeps this demo's output simple and ordered.
        env.setParallelism(1);

        /*
            (k, v): the payload is stored in the value; the key is optional
                    and omitted here (no key is written).

            Constructor arguments:
              String brokerList                        — Kafka cluster address(es)
              String topicId                           — destination topic
              SerializationSchema<IN> serializationSchema — value serializer;
                                                            IN is the outgoing type
         */
        FlinkKafkaProducer<String> flinkKafkaProducer =
                new FlinkKafkaProducer<>("hadoop102:9092", "topicB", new SimpleStringSchema());

        env
            .socketTextStream("hadoop103", 8888)
            .map(new WaterSensorMapFunction())
            // Convert the POJO to a JSON string before writing to Kafka.
            .map(JSON::toJSONString)
            .addSink(flinkKafkaProducer);

        // Do NOT wrap this in try/catch with printStackTrace(): swallowing the
        // exception would make a failed job look like a clean exit.
        env.execute();
    }
}
