package cn.atguigu;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * @author JiDingXia
 * @date 2025-02-22 20:43:10
 */
/**
 * Demonstrates a synchronous Kafka send: each {@code send()} is followed by
 * {@code Future.get()}, so the loop blocks until the broker acknowledges the
 * record before producing the next one.
 */
public class CustomProducerSync {
	public static void main(String[] args) throws ExecutionException, InterruptedException {
		// 1. Build the producer configuration.
		Properties properties = new Properties();
		// 2. Broker address and key/value serializers (both keys and values are Strings).
		properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092");
		properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
		properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

		// 3. Create the producer in try-with-resources so it is closed even when
		//    send(...).get() throws (the original close() was unreachable on failure).
		try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
			// 4. Send 10 records to topic "first".
			for (int i = 0; i < 10; i++) {
				ProducerRecord<String, String> producerRecord = new ProducerRecord<>("first", "kafka" + i);
				// The callback fires when the producer receives the ack. Note: although
				// the callback itself is invoked asynchronously, the .get() below blocks
				// until the send completes, making the overall send synchronous.
				kafkaProducer.send(producerRecord, (recordMetadata, e) -> {
					if (e == null) {
						// Success — no exception; the lambda captures the (effectively
						// final) producerRecord from the enclosing scope.
						System.out.println("producerRecord:" + producerRecord.value() + ", Topic:" + recordMetadata.topic() +
							", Partition:" + recordMetadata.partition() + ", Offset:" +
							recordMetadata.offset());
					} else {
						// Failure — log the exception from the broker/client.
						e.printStackTrace();
					}
				}).get();
			}
		}
		// 5. Producer is flushed and closed automatically by try-with-resources.
	}
}
