package com.fanxl.flink.datastream.sink;

import com.sun.istack.internal.Nullable;

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Example Flink job that writes a small sample stream to Kafka with
 * exactly-once (transactional) semantics, appending a fixed tag to every
 * record via a custom {@link KafkaSerializationSchema}.
 *
 * @author Dell
 * @date 2023/5/17
 */
public class KafkaTagSink {

    /** Target Kafka topic — single constant so the producer and the serializer stay in sync. */
    private static final String TOPIC = "topic-name";

    /**
     * Builds and runs the pipeline: one sample element -> tag-appending
     * serializer -> transactional Kafka sink.
     *
     * @param args unused
     * @throws Exception if job submission/execution fails
     */
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // EXACTLY_ONCE relies on Flink checkpoints to commit Kafka transactions;
        // without checkpointing enabled, no written record would ever become
        // visible to read-committed consumers.
        env.enableCheckpointing(60_000L);

        // Kafka producer properties.
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "10.215.15.30:9092");
        // The Flink producer's default transaction timeout (1 hour) exceeds the
        // broker's default transaction.max.timeout.ms (15 minutes); align it so
        // the transactional producer can initialize. 900000 ms = 15 min.
        properties.setProperty("transaction.timeout.ms", "900000");

        // Kafka sink with exactly-once (transactional) semantics.
        FlinkKafkaProducer<String> kafkaProducer = new FlinkKafkaProducer<>(
                TOPIC,
                new MyKafkaSerializationSchema(),
                properties,
                FlinkKafkaProducer.Semantic.EXACTLY_ONCE);

        // Sample source: a single CSV-style element.
        DataStream<String> dataStream = env.fromElements("1,2,3");

        // Write the stream to Kafka.
        dataStream.addSink(kafkaProducer);

        // Submit and run the job.
        env.execute("Kafka Tag Producer Example");
    }

    /**
     * Serialization schema that appends a fixed tag as an extra CSV field
     * before writing each record to Kafka. Records are written with a null
     * key to the {@code TOPIC} topic.
     */
    public static class MyKafkaSerializationSchema implements KafkaSerializationSchema<String> {
        @Override
        public ProducerRecord<byte[], byte[]> serialize(String element, Long timestamp) {
            // Append the tag as one more comma-separated field.
            String value = element + "," + "my-tag";
            // Encode explicitly as UTF-8 rather than the platform default charset,
            // so the payload bytes are identical regardless of the JVM's locale.
            return new ProducerRecord<>(TOPIC, value.getBytes(StandardCharsets.UTF_8));
        }
    }
}
