package com.tai.mq.kafka;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.time.LocalDateTime;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * @Description :  Demo Kafka producer that continuously publishes timestamped
 *                 messages to three topics (myTopic1..myTopic3) on a local
 *                 three-broker test cluster.
 * @Author :  ldt
 * @CreateTime :  2023/7/12
 */
public class KafkaProducer {

    /** Broker list for the three-node demo cluster. */
    private static final String BOOTSTRAP_SERVERS =
            "192.168.56.101:9092,192.168.56.102:9092,192.168.56.103:9092";

    /**
     * Entry point: sends a timestamped message to three topics roughly every
     * millisecond until the process is interrupted or killed.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        // NOTE(review): acks=all requests full-ISR acknowledgement, but
        // retries=0 still drops records on transient broker errors — raise
        // retries if durability actually matters for this demo.
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        properties.put(ProducerConfig.RETRIES_CONFIG, 0);
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) if the loop exits via interrupt or exception.
        // The fully-qualified name avoids the clash with this demo class.
        try (org.apache.kafka.clients.producer.KafkaProducer<String, String> kafkaProducer =
                     new org.apache.kafka.clients.producer.KafkaProducer<>(properties)) {
            while (true) {
                // Fire-and-forget: the returned Futures are deliberately ignored.
                kafkaProducer.send(new ProducerRecord<>("myTopic1", "myTopic1_" + LocalDateTime.now()));
                kafkaProducer.send(new ProducerRecord<>("myTopic2", "myTopic2_" + LocalDateTime.now()));
                kafkaProducer.send(new ProducerRecord<>("myTopic3", "myTopic3_" + LocalDateTime.now()));
                TimeUnit.MILLISECONDS.sleep(1);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so the JVM/caller observes the stop request.
            Thread.currentThread().interrupt();
        }
    }

}
