package com.raos.kafka.serializer;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * Kafka serialization basics -- producer side.
 *
 * <p>Sends a single {@code User} record (serialized with the custom
 * {@code ProtobufSerializer}) to the {@code ser-topic} topic.
 *
 * @author raos
 * @email 991207823@qq.com
 * @date 2021/6/19 22:45
 */
public class SerializerProducer {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.235.102:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "com.raos.kafka.serializer.ProtobufSerializer");
        // acks: 0 = ack on send | 1 = ack once the leader has written | all = ack after all in-sync followers
        props.put("acks", "1");
        // Automatic retry count on transient send failures
        props.put("retries", 3);
        // Batch size in bytes that triggers a send (default 16K)
        props.put("batch.size", 16384);
        // Max wait time (ms) before a partial batch is sent
        props.put("linger.ms", 5);
        // Producer buffer size in bytes (default 32M); a full buffer also triggers sending
        props.put("buffer.memory", 33554432);
        // Max time (ms) the producer blocks while fetching metadata before throwing
        props.put("max.block.ms", 3000);

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) even if record creation or send() throws.
        try (Producer<String, User> producer = new KafkaProducer<>(props)) {
            User user = new User(100L, "yufeng16", 1, "15583306348");
            // Attach a callback so asynchronous send failures are surfaced
            // instead of being silently dropped by fire-and-forget.
            producer.send(new ProducerRecord<>("ser-topic", "1", user), (metadata, exception) -> {
                if (exception != null) {
                    exception.printStackTrace();
                }
            });
        }
    }

}
