package com.atguigu.flink.datastramapi.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Created by Smexy on 2023/2/27
 *
 *  写入kafka，flink程序充当生产者角色。构造 FlinkKafkaProducer
 *
 *           addSink(FlinkKafkaProducer) : FlinkKafkaProducer是老的API，现在也可以使用!
 */
/**
 * Writes a stream of {@code WaterSensor} records to Kafka; the Flink program
 * acts as the Kafka producer. Built on the legacy {@code FlinkKafkaProducer}
 * API (deprecated in newer Flink versions but still usable).
 *
 * <p>FlinkKafkaProducer constructor arguments:
 * <ul>
 *   <li>{@code String defaultTopic} — fallback topic; unused here because the
 *       serializer names the target topic on every record.</li>
 *   <li>{@code KafkaSerializationSchema<IN>} — turns each element into a
 *       {@code ProducerRecord<byte[], byte[]>}.</li>
 *   <li>{@code Properties producerConfig} — extra producer settings; keys are
 *       the constants declared in {@link ProducerConfig}.</li>
 *   <li>{@code FlinkKafkaProducer.Semantic} — delivery guarantee
 *       (EXACTLY_ONCE additionally requires checkpointing to be enabled).</li>
 * </ul>
 */
public class Demo4_KafkaSinkWithKey
{
    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        env.setParallelism(1);

        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092");

        FlinkKafkaProducer<WaterSensor> flinkKafkaProducer = new FlinkKafkaProducer<>(
            // Default topic is never consulted: serialize() sets the topic explicitly.
            "无",
            new KafkaSerializationSchema<WaterSensor>()
            {
                /*
                    The key controls Kafka partitioning: records with the same key
                    always land in the same partition.
                      - extract the key from the element and serialize it to byte[]
                      - serialize the whole element to byte[] (as JSON)
                      - wrap key and value in a ProducerRecord addressed to "topicC"
                 */
                @Override
                public ProducerRecord<byte[], byte[]> serialize(WaterSensor element, @Nullable Long timestamp) {
                    byte[] key = element.getId().getBytes(StandardCharsets.UTF_8);
                    // JSON only so the payload is human-readable in Kafka UI tools.
                    byte[] value = JSON.toJSONString(element).getBytes(StandardCharsets.UTF_8);

                    return new ProducerRecord<>("topicC", key, value);
                }
            },
            properties,
            // EXACTLY_ONCE would additionally require checkpointing to be enabled.
            FlinkKafkaProducer.Semantic.AT_LEAST_ONCE
        );

        env
            .socketTextStream("hadoop103", 8888)
            .map(new WaterSensorMapFunction())
            .addSink(flinkKafkaProducer);

        // Let failures propagate instead of swallowing them with printStackTrace():
        // a failed job submission should not exit silently with status 0.
        env.execute();
    }
}
