package com.atguigu;

import java.nio.charset.StandardCharsets;
import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
//import org.apache.flink.streaming.connectors.kafka.KafkaSink;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
/**
 * Example Flink job that writes a small bounded stream to Kafka with
 * exactly-once semantics via the transactional {@link FlinkKafkaProducer}.
 *
 * <p>Requires a Kafka broker reachable at {@code localhost:9092} and a
 * topic named {@code my-topic}.
 */
public class FlinkKafkaExactOnceAndAsync {

        public static void main(String[] args) throws Exception {
            // Set up the Flink execution environment.
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            // EXACTLY_ONCE delivery is driven by Flink checkpoints: the Kafka
            // transaction is committed only when a checkpoint completes. Without
            // checkpointing enabled, transactions are never committed and
            // read_committed consumers would see no data.
            env.enableCheckpointing(10_000L);

            // Bounded demo source.
            DataStream<String> stream = env.fromElements("hello", "world", "flink");

            // Kafka producer configuration.
            Properties props = new Properties();
            props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            // Must be >= the checkpoint interval and <= the broker's
            // transaction.max.timeout.ms (broker default: 15 minutes).
            props.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, "60000");

            // Serializer mapping each record to the target topic. Use an
            // explicit charset — String#getBytes() without one depends on the
            // platform-default encoding.
            KafkaSerializationSchema<String> schema = new KafkaSerializationSchema<String>() {
                @Override
                public ProducerRecord<byte[], byte[]> serialize(String element, Long timestamp) {
                    return new ProducerRecord<>("my-topic", element.getBytes(StandardCharsets.UTF_8));
                }
            };

            // Transactional producer providing exactly-once delivery.
            FlinkKafkaProducer<String> producer = new FlinkKafkaProducer<>(
                    "my-topic",
                    schema,
                    props,
                    FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
            producer.setWriteTimestampToKafka(true);

            // Attach the sink; the connector sends records to Kafka
            // asynchronously. (Previously this line was commented out, so the
            // job never actually wrote to Kafka.)
            stream.addSink(producer).name("Kafka Sink");

            // Execute the job.
            env.execute("Flink Kafka Async Sink Example");
        }
    }

