package com.niit.kafka;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;

//Produce data asynchronously (send with a completion callback)
public class KafkaProducerTest2 {


    public static void main(String[] args) throws InterruptedException {
        //1. Connection and serialization settings for the producer.
        Properties props = new Properties();
        props.put("bootstrap.servers","node1:9092");
        //"all": wait until the full in-sync replica set acknowledges each record.
        props.put("acks","all");
        props.put("key.serializer","org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer","org.apache.kafka.common.serialization.StringSerializer");

        //2. Create the producer. try-with-resources guarantees close() (which
        //   also flushes buffered records) even if an exception escapes the
        //   loop — the original only closed on the happy path, leaking the
        //   producer's I/O thread and buffers on failure.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {

            //3. Produce data
            for (int i = 0; i < 100000; i++) {
                //3.1 Build a record: topic "BD11", null key (round-robin/sticky
                //    partition assignment), value = loop index as a string.
                ProducerRecord<String, String> record = new ProducerRecord<>("BD11", null, i + "");

                //send() is asynchronous: it returns immediately and the callback
                //runs on the producer's I/O thread once the broker acknowledges
                //the record (or the send ultimately fails).
                producer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                        //A null exception means the send succeeded.
                        if (e == null) {
                            String topic = recordMetadata.topic();
                            int partition = recordMetadata.partition();
                            long timestamp = recordMetadata.timestamp();
                            long offset = recordMetadata.offset();
                            System.out.println("主题："+topic+",分区："+partition+",时间戳："+timestamp+",偏移量："+offset);
                        } else {
                            System.out.println("发生了异常");
                            //Print the full stack trace so the root cause is not
                            //lost — getMessage() alone drops it (and can be null).
                            e.printStackTrace();
                        }
                    }
                });

                //Throttle to one record every 3 seconds (demo pacing only).
                Thread.sleep(3000);
            }
        }
    }

}
