package com.atguigu.flink.datastreamapi.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Created by Smexy on 2022/12/14
 *
 * Demonstrates writing keyed records from Flink to Kafka.
 *
 * The record key is used ONLY for partitioning: records carrying the same key
 * are always written to the same Kafka partition.
 */
public class Demo3_KafkaSinkWithKey
{
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties properties = new Properties();
        // ProducerConfig holds the constant names of the Kafka producer settings.
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"hadoop102:9092");

        /*
           FlinkKafkaProducer(
            String defaultTopic,   // default target topic; effectively unused here because
                                   // serialize() names the topic on every record
            KafkaSerializationSchema<IN> serializationSchema,  // how to serialize key and value
                    IN: the element type flowing into the sink (output type of the upstream operator)
            Properties producerConfig, // extra producer settings
            FlinkKafkaProducer.Semantic semantic  // delivery guarantee
            )
         */
        FlinkKafkaProducer<WaterSensor> flinkKafkaProducer = new FlinkKafkaProducer<>(
            "无",
            new KafkaSerializationSchema<WaterSensor>()
            {

                /*
                    serialize: manually turn the WaterSensor element into byte[] for
                    key and value, and wrap both into a ProducerRecord.
                 */
                @Override
                public ProducerRecord<byte[], byte[]> serialize(WaterSensor element, @Nullable Long timestamp) {

                    // Key = sensor id, so all readings of one sensor land in the same partition.
                    byte[] key = element.getId().getBytes(StandardCharsets.UTF_8);

                    // Serialize through a JSON string first so the payload stays
                    // human-readable in external tooling (console consumer, UIs).
                    byte[] value = JSON.toJSONString(element).getBytes(StandardCharsets.UTF_8);

                    return new ProducerRecord<>("topicC", key, value);
                }
            },
            properties,
            // Checkpointing is not enabled, so EXACTLY_ONCE (EOS) is unavailable.
            FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
        );

        env
            .socketTextStream("hadoop103", 8888)
            .map(new WaterSensorMapFunction())
            .addSink(flinkKafkaProducer);

        // Let failures propagate instead of catching and calling printStackTrace():
        // a swallowed exception would hide job-submission errors and exit with status 0.
        env.execute();
    }
}
