package org.example;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

public class Producer {
    /**
     * Entry point: sends five demo messages ("demo0".."demo4") to the topic
     * {@code topic_test} on the configured broker and reports per-record
     * delivery success or failure via the send callback.
     *
     * @param args unused command-line arguments
     * @throws ExecutionException   declared for API compatibility (not thrown here)
     * @throws InterruptedException declared for API compatibility (not thrown here)
     */
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        String serverList = "192.168.64.2:9092";
        String topic = "topic_test";
        Properties prop = new Properties();

        // acks: 0 = fire-and-forget, 1 = wait for the leader replica only,
        // -1/all = wait for all in-sync replicas to acknowledge the write.
        prop.put(ProducerConfig.ACKS_CONFIG, "1");
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, serverList);
        // Keys and values are plain strings, so both use StringSerializer.
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // try-with-resources guarantees the producer is closed (fixing a resource
        // leak); close() also flushes all buffered records before returning, which
        // replaces the original Thread.sleep(10000) wait hack.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(prop)) {
            for (int i = 0; i < 5; i++) {
                kafkaProducer.send(new ProducerRecord<>(topic, "demo" + i), (metadata, exception) -> {
                    // BUG FIX: the original callback ignored 'exception' and always
                    // reported success; check it so delivery failures are surfaced.
                    if (exception != null) {
                        System.err.println("消息发送失败: " + exception);
                    } else {
                        System.out.println("消息发送成功");
                    }
                });
            }
        }
    }
}
