package com.atguigu.sink;

import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import javax.annotation.Nullable;
import java.nio.charset.StandardCharsets;

/**
 * @author gmd
 * @desc Sink to Kafka with an explicit record key via a custom serializer.
 * @since 2024-11-26 11:36:51
 */
public class SinkKafkaWithKey {

    // Connection targets hoisted out of the builder chain so the example is
    // easy to re-point at another cluster or topic.
    private static final String BOOTSTRAP_SERVERS = "hadoop102:9092,hadoop103:9092,hadoop104:9092";
    private static final String TOPIC = "flinkTopic";

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // EXACTLY_ONCE delivery below relies on checkpointing: the Kafka
        // transaction is committed when a checkpoint completes.
        env.enableCheckpointing(2000, CheckpointingMode.EXACTLY_ONCE);
        env.setRestartStrategy(RestartStrategies.noRestart());

        SingleOutputStreamOperator<String> sensorDS = env.socketTextStream("127.0.0.1", 7777);


        /*
         * To control the Kafka record key, supply a custom serializer:
         * 1. Implement KafkaRecordSerializationSchema and override serialize().
         * 2. Encode the key as a byte array.
         * 3. Encode the value as a byte array.
         * 4. Return a ProducerRecord carrying both key and value.
         */
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                .setBootstrapServers(BOOTSTRAP_SERVERS)
                .setRecordSerializer(
                    new KafkaRecordSerializationSchema<String>() {
                        @Override
                        public ProducerRecord<byte[], byte[]> serialize(String element, KafkaSinkContext context, Long timestamp) {
                            // Key = text before the first comma; value = the whole line.
                            // ("x".split(",") always yields at least one element, so
                            // fields[0] is safe even for lines without a comma.)
                            String[] fields = element.split(",");
                            byte[] key = fields[0].getBytes(StandardCharsets.UTF_8);
                            byte[] value = element.getBytes(StandardCharsets.UTF_8);
                            return new ProducerRecord<>(TOPIC, key, value);
                        }
                    }
                )
                .setDeliveryGuarantee(DeliveryGuarantee.EXACTLY_ONCE)
                // EXACTLY_ONCE requires a transactional-id prefix; it should be
                // unique per job to avoid transactional-id clashes across jobs.
                .setTransactionalIdPrefix("atguigu-")
                // Transaction timeout must exceed the checkpoint interval and stay
                // at or below the broker's transaction.max.timeout.ms (default
                // 15 min); 10 min satisfies both. NOTE: downstream consumers need
                // isolation.level=read_committed to see only committed records.
                .setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, String.valueOf(10 * 60 * 1000))
                .build();


        sensorDS.sinkTo(kafkaSink);
        env.execute();
    }

}
