package cn.atguigu;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * @author JiDingXia
 * @date 2025-02-20 20:58:52
 */
/**
 * Demonstrates asynchronous sends to Kafka with a per-record callback that
 * prints the destination topic/partition/offset on success, or the stack
 * trace on failure. Sends ten String records ("kafka0".."kafka9") to the
 * "first" topic on broker hadoop102:9092.
 */
public class CustomProducerWithCallback {
    public static void main(String[] args) {
        // 1. Build the producer configuration.
        Properties properties = new Properties();
        // 2. Broker address plus key/value serializer classes.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"hadoop102:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // 3. Create the producer in try-with-resources: close() (which flushes
        //    buffered records) is now guaranteed even if send() throws — the
        //    original closed it manually and leaked the producer on exception.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
            // 4. Send the records asynchronously.
            for (int i = 0; i < 10; i++) {
                // Effectively final (JDK 8+), so the lambda below may capture it.
                ProducerRecord<String, String> producerRecord =
                        new ProducerRecord<>("first", "kafka" + i);
                // Callback is a functional interface; the lambda replaces the
                // anonymous class. It runs asynchronously when the producer
                // receives the broker's ack for this record.
                kafkaProducer.send(producerRecord, (recordMetadata, e) -> {
                    if(e==null){
                        // Success: no exception for this record.
                        System.out.println("producerRecord:"+producerRecord.value()+", Topic:"+recordMetadata.topic() +
                                ", Partition:"+recordMetadata.partition()+", Offset:"+
                                recordMetadata.offset());
                    }else{
                        // Failure: surface the error (a real application would
                        // log and/or retry instead of printing the stack trace).
                        e.printStackTrace();
                    }
                });
            }
        }
        // 5. Producer closed automatically by try-with-resources.
    }
}
