package org.czy.kafka.test.producer;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * KafkaProducerTest
 *
 * @author caizhiyang
 * @since 2024-04-25
 */
public class KafkaProducerTest3 {

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // 1. Build the producer configuration map.
        Map<String, Object> configMap = new HashMap<>();

        // 2. Kafka cluster bootstrap address.
        configMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        // 3. Kafka records are key/value pairs, so both the key and the value
        //    must be serialized before transmission.
        configMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // 4. Create the producer. KafkaProducer is AutoCloseable: try-with-resources
        //    guarantees it is closed (flushing buffered records) even if send()/get()
        //    throws — the original leaked the producer on any exception.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(configMap)) {

            for (int i = 0; i < 10; i++) {
                // 5. Prepare the record: topic name, key, value.
                ProducerRecord<String, String> record =
                        new ProducerRecord<>("test", "key" + i, "value" + i);

                // 6. Send asynchronously with a completion callback.
                //    BUG FIX: the original callback printed a success message
                //    unconditionally; per the Callback contract, exactly one of
                //    (metadata, exception) is meaningful — check exception first.
                Future<RecordMetadata> metadataFuture = producer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception exception) {
                        if (exception != null) {
                            System.err.println("数据发送失败:" + exception.getMessage());
                        } else {
                            System.out.println("数据发送成功:" + metadata.timestamp());
                        }
                    }
                });

                // send() returns a Future; get() blocks until the broker acknowledges,
                // which makes this loop effectively synchronous (intentional for the demo).
                RecordMetadata recordMetadata = metadataFuture.get();
                System.out.println("partition=" + recordMetadata.partition()
                        + ", offset=" + recordMetadata.offset());
            }
            System.out.println("发送完成");
        }
        // 7. Producer closed automatically by try-with-resources.
    }
}
