package com.atguigu.flink.datastreamapi.sink;

import com.alibaba.fastjson.JSON;
import com.atguigu.flink.function.WaterSensorMapFunction;
import com.atguigu.flink.pojo.WaterSensor;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;

/**
 * Created by Smexy on 2023/4/6
 *  加入 kafka-connector
 *
 *
 *  eos: exactly-once semantics
 */
public class Demo3_KafkaSinkWithKey
{
    public static void main(String[] args) {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // New sink API: AT_LEAST_ONCE does not require checkpointing;
        // EXACTLY_ONCE does — enable it before switching the delivery guarantee.
        //env.enableCheckpointing(5000);

        // Build the KafkaSink (producer role). Records sharing the same key
        // land in the same partition, preserving per-key ordering.
        KafkaSink<WaterSensor> kafkaSink = KafkaSink
            .<WaterSensor>builder()
            .setBootstrapServers("hadoop102:9092")
            // Custom serializer: turns a WaterSensor into a keyed ProducerRecord.
            .setRecordSerializer(
                new KafkaRecordSerializationSchema<WaterSensor>()
                {
                    @Override
                    public ProducerRecord<byte[], byte[]> serialize(WaterSensor element, KafkaSinkContext context, Long timestamp) {
                        // Use the sensor id as the Kafka message key so all
                        // readings of one sensor go to the same partition;
                        // the value is the full POJO as JSON.
                        byte[] key = element.getId().getBytes(StandardCharsets.UTF_8);
                        byte[] value = JSON.toJSONString(element).getBytes(StandardCharsets.UTF_8);

                        return new ProducerRecord<>("topicD",key,value);
                    }
                }
            )
            // Additional producer properties can be set here.
            //.setProperty()
            // Delivery guarantee; EXACTLY_ONCE additionally requires checkpointing.
            .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
            // Transactional-id prefix for the Kafka producer (visible in the
            // broker's __transaction_state topic). Mandatory for EXACTLY_ONCE.
            .setTransactionalIdPrefix("atguigu-")
            .build();

        env
            .socketTextStream("hadoop102", 8888)
            .map(new WaterSensorMapFunction())
            .sinkTo(kafkaSink);

        try {
            env.execute();
        } catch (Exception e) {
            // Rethrow with the original cause so a failed job does not exit
            // silently with status 0 (printStackTrace() would swallow it).
            throw new RuntimeException("Flink job execution failed", e);
        }
    }
}
