package com.zzz.Producer;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;
import java.util.Random;

/**
 * Demo producer that sends one "hello-kafka-N" record per second to the
 * {@code test} topic, keyed by a random digit 0-9, until interrupted.
 */
public class KafkaProducerStudy {

    public static void main(String[] args) {
        Properties properties = new Properties();
        // Bootstrap brokers used to discover the full cluster.
        properties.put("bootstrap.servers", "node01:9092,node02:9092,node03:9092");
        // acks=1: wait for the partition leader's acknowledgment only.
        properties.put("acks", "1");
        properties.put("retries", 0);
        // Total memory the producer may use to buffer records (default 32 MB).
        properties.put("buffer.memory", 33554432);
        // Per-partition batch size before records are sent (default 16 KB).
        properties.put("batch.size", 16384);
        // How long to wait for more records before sending a batch
        // (default 0 = do not wait, send immediately).
        properties.put("linger.ms", 1);
        // Serializers for record keys and values.
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // Single RNG hoisted out of the loop — no need to re-seed every second.
        Random random = new Random();

        // try-with-resources guarantees close() runs on any exit path,
        // flushing buffered records and releasing client resources.
        try (Producer<String, String> producer = new KafkaProducer<>(properties)) {
            while (true) {
                int i = random.nextInt(10);
                ProducerRecord<String, String> kvProducerRecord =
                        new ProducerRecord<>("test", Integer.toString(i), "hello-kafka-" + i);
                producer.send(kvProducerRecord);
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and stop producing cleanly
                    // instead of propagating out of main with the flag cleared.
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }
    }
}
