package com.kafka;

import com.alibaba.fastjson.JSON;
import com.kafka.pojo.User;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ExecutionException;

/**
 * Kafka producer wrapper: serializes objects to JSON and publishes them to a fixed topic.
 *
 * @author 顾志杰
 * @date 2020/8/7-11:01
 */
public class CollectKafkaProducer {

    /** Underlying Kafka producer; thread-safe and kept open for the life of this instance. */
    private final KafkaProducer<String, String> producer;

    /** Destination topic for every record this instance sends. */
    private final String topic;

    /**
     * Creates a producer bound to the given topic and configures the Kafka client.
     *
     * @param topic destination Kafka topic
     */
    public CollectKafkaProducer(String topic) {
        Properties props = new Properties();
        // Broker address — NOTE(review): hard-coded; consider externalizing to configuration.
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.35.138:9092");
        // Logical client id identifying this producer in broker logs and metrics.
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "demo-producer-test");
        // Optional batching knobs (defaults used here):
        //   batch.size=16384, linger.ms, buffer.memory=33554432
        // Serialize both key and value as strings.
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        this.producer = new KafkaProducer<>(props);
        this.topic = topic;
    }

    /**
     * Serializes {@code message} to JSON (via fastjson) and sends it to the configured topic.
     *
     * <p>Bug fixes vs. the original: the producer was closed in a {@code finally} block,
     * which made this instance unusable after the first call and raced the async callback;
     * and the "synchronous" path never waited on the returned future. The producer now
     * stays open until {@link #close()} is called explicitly, and the sync path blocks
     * until the broker acknowledges the record.
     *
     * @param message  payload object to serialize and send
     * @param syncSend if {@code true}, block until the broker acknowledges the record;
     *                 if {@code false}, send asynchronously and report failures via callback
     * @throws InterruptedException  if interrupted while waiting for the acknowledgement
     * @throws IllegalStateException if the synchronous send fails broker-side
     */
    public void send(Object message, boolean syncSend) throws InterruptedException {
        ProducerRecord<String, String> record =
                new ProducerRecord<>(topic, JSON.toJSONString(message));
        if (syncSend) {
            try {
                // Block on the future so a "synchronous" send really waits for the ack.
                producer.send(record).get();
            } catch (ExecutionException e) {
                // Surface the broker-side failure, preserving the original cause.
                throw new IllegalStateException(
                        "Unable to write to Kafka in CollectKafkaProducer [" + topic + "]",
                        e.getCause());
            }
        } else {
            // Asynchronous send; failures are reported through the callback.
            producer.send(record, (recordMetadata, e) -> {
                if (e != null) {
                    System.err.println("Unable to write to Kafka in CollectKafkaProducer [" + topic + "] exception: " + e);
                }
            });
        }
    }

    /**
     * Flushes any pending records and releases the producer's resources.
     * Call once when this instance is no longer needed.
     */
    public void close() {
        producer.close();
    }

    /**
     * Smoke test: sends one JSON-serialized {@link User} synchronously, then shuts down.
     * Replaces the original {@code Thread.sleep(Integer.MAX_VALUE)}, which parked the
     * JVM forever after the producer had already been closed.
     *
     * @param args unused
     * @throws InterruptedException if the send is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        CollectKafkaProducer collectKafkaProducer = new CollectKafkaProducer("topic1");
        try {
            User user = new User();
            user.setId(UUID.randomUUID().toString());
            user.setName("张三");
            collectKafkaProducer.send(user, true);
        } finally {
            // Explicit shutdown now that send() no longer closes the producer.
            collectKafkaProducer.close();
        }
    }
}
