package com.atguigu.producer;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * Demo Kafka producer: sends 99 string messages (indices 1200..1298) to the
 * topic {@code second} asynchronously, logging per-record acknowledgements
 * via a send callback.
 */
public class ProducerDemo {
    public static void main(String[] args) {
        // 1. Producer configuration.
        Properties props = new Properties();
        props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // acks=1: the partition leader acknowledges the write without waiting for followers.
        props.setProperty("acks", "1");
        props.setProperty("bootstrap.servers", "hadoop102:9092,hadoop103:9092,hadoop104:9092");
        // Batch size of 1 KB per partition batch.
        props.setProperty("batch.size", "1024");
        //props.put("linger.ms", 1); // max wait before flushing an under-filled batch

        // 2. Create the producer client. try-with-resources guarantees close()
        //    (which flushes buffered records) even if a send() call throws mid-loop.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // 3. Build and send the records.
            for (int i = 1200; i < 1299; i++) {
                ProducerRecord<String, String> record =
                        new ProducerRecord<>("second", "这是第" + i + "条数据");
                final int index = i; // effectively-final copy for use inside the callback
                // 4. Asynchronous send with a completion callback.
                producer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception exception) {
                        // On failure the broker metadata is null and `exception` is set;
                        // check it first to avoid a NullPointerException and to surface
                        // the error instead of silently masking it.
                        if (exception != null) {
                            System.err.println("第" + index + "条数据发送失败: " + exception);
                            return;
                        }
                        System.out.println("返回第" + index + "确认消息");
                        String topic = metadata.topic();
                        long offset = metadata.offset();
                        int partition = metadata.partition();
                        System.out.println("topic" + topic + ",partition" + partition + ",offset" + offset + "," + index);
                    }
                });
                System.out.println("第" + i + "条数据发送完成");
            }
        }
        // 5. producer.close() runs automatically via try-with-resources.
    }
}
