package com.atguigu.flink.streamapi.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Created by Smexy on 2022/11/21
 *
 *  Sink pattern:  DataStreamSink&lt;T&gt; addSink(SinkFunction&lt;T&gt; sinkFunction)
 *
 *  When records need a Kafka key, SimpleStringSchema is not enough — provide a
 *  custom KafkaSerializationSchema that builds the ProducerRecord with both
 *  key and value serialized to byte[].
 */
public class Demo2_KafkaSinkWithKey
{
    /**
     * Reads "id,ts,vc" lines from a socket, maps them to {@code WaterSensor}
     * POJOs, and writes them to the Kafka topic {@code t3} with the sensor id
     * as the record key.
     *
     * @param args unused
     * @throws Exception if job submission or execution fails — propagated so a
     *                   failed job does not exit as if it succeeded
     */
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Declare how each element is serialized into a Kafka ProducerRecord.
        KafkaSerializationSchema<WaterSensor> kafkaSerializationSchema = new KafkaSerializationSchema<WaterSensor>()
        {
            /*
                serialize: takes a WaterSensor element (plus an optional event
                timestamp) and returns a ProducerRecord<byte[], byte[]> with
                both key and value already converted to bytes.
             */
            @Override
            public ProducerRecord<byte[], byte[]> serialize(WaterSensor element, @Nullable Long timestamp) {

                // Use the sensor id as the key so all records of one id hash
                // to the same Kafka partition.
                byte[] key = element.getId().getBytes(StandardCharsets.UTF_8);

                // Serialize the POJO to a JSON string first, then to UTF-8 bytes.
                byte[] value = JSON.toJSONString(element).getBytes(StandardCharsets.UTF_8);

                return new ProducerRecord<>("t3", key, value);
            }
        };

        Properties properties = new Properties();
        properties.put("bootstrap.servers", "hadoop102:9092");

        /*
            FlinkKafkaProducer(
                String defaultTopic,            // fallback topic; effectively unused here because
                                                // serialize() always names the topic explicitly
                KafkaSerializationSchema<IN> serializationSchema, // converts key/value to byte[]
                Properties producerConfig,      // producer settings — broker addresses
                FlinkKafkaProducer.Semantic semantic // delivery guarantee
            )
         */
        FlinkKafkaProducer<WaterSensor> flinkKafkaProducer = new FlinkKafkaProducer<WaterSensor>(
            "无",
            kafkaSerializationSchema,
            properties,
            FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
            );

        env
           .socketTextStream("hadoop103", 8888)
           .map(new MapFunction<String, WaterSensor>()
           {
               @Override
               public WaterSensor map(String value) throws Exception {
                   // Expected input format: "id,ts,vc" (e.g. "s1,1000,5").
                   String[] data = value.split(",");
                   return new WaterSensor(
                       data[0],
                       Long.valueOf(data[1]),
                       Integer.valueOf(data[2])
                   );
               }
           })
           .addSink(flinkKafkaProducer);

        // Let execution failures propagate. The previous try/catch with
        // printStackTrace() swallowed the exception, so a failed job would
        // have terminated with exit code 0 and no proper error surfaced.
        env.execute();
    }
}
