package com.lagou.kafka.demo.producer;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.HashMap;
import java.util.Map;

/**
 * Demo producer: sends ten records (keys 100..109) to partition 0 of
 * {@code topic_1} and prints per-record acknowledgement metadata via an
 * asynchronous send callback.
 */
public class MyProducer3 {
    public static void main(String[] args) {
        // Producer configuration: broker address plus key/value serializer classes.
        // ProducerConfig constants avoid typo-prone raw string keys.
        Map<String, Object> configs = new HashMap<>();
        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop001:9092");
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.IntegerSerializer");
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) even if record creation or send() throws; the
        // original only closed on the happy path and could leak the producer.
        try (KafkaProducer<Integer, String> producer = new KafkaProducer<>(configs)) {
            for (int i = 100; i < 110; i++) {
                ProducerRecord<Integer, String> record = new ProducerRecord<>(
                        "topic_1",           // topic
                        0,                   // fixed partition
                        i,                   // key
                        "lagou message " + i // value
                );
                // Asynchronous send; the callback fires when the broker
                // acknowledges the record (or the send fails).
                producer.send(record, (metadata, exception) -> {
                    if (exception == null) {
                        System.out.println(
                                "主题：" + metadata.topic() + "\n" +
                                        "分区：" + metadata.partition() + "\n" +
                                        "偏移量：" + metadata.offset() + "\n" +
                                        "序列化的key字节：" + metadata.serializedKeySize() + "\n" +
                                        "序列化的value字节：" + metadata.serializedValueSize() + "\n" +
                                        "时间戳：" + metadata.timestamp() + "\n"
                        );
                    } else {
                        System.out.println("有异常:" + exception.getMessage());
                    }
                });
            }
        }
    }
}
