package com.ln.kafka.v2_4_0.producer;

import org.apache.kafka.clients.producer.*;

import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * A few recommendations:
 * 1. KafkaProducer is thread-safe — share a single instance across threads. Creating one
 *    producer per thread causes heavy context switching and contention, hurting throughput.
 * 2. The record key is important:
 *  2.1 A well-chosen key balances load across partitions.
 *  2.2 Good key design also lets real-time tools such as Flink or Spark Streaming process faster.
 *
 * 3. acks=all: Kafka provides delivery guarantees at the broker level, but if you truly cannot
 *    afford to lose data, handle send failures in your own code as well.
 */
public class ProducerSimple {

    /** Topic every demo method publishes to. */
    public static final String TOPIC_NAME = "topic-3";

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // Fire-and-forget async send
//        asyncSend();

        // Synchronous (blocking) send
//        syncSend();

        // Async send with a completion callback
//        callbackSend();

        // Async send with a callback and a custom partitioner
        callbackSendAndPartition();
    }

    /**
     * Builds the producer configuration shared by all demo methods.
     *
     * @return a fresh {@link Properties} with bootstrap servers, acks, batching and
     *         String serializers configured
     */
    private static Properties baseProperties() {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // "all": wait for the full in-sync replica set to acknowledge each record.
        properties.setProperty(ProducerConfig.ACKS_CONFIG, "all");
        properties.setProperty(ProducerConfig.RETRIES_CONFIG, "0");
        // Flush a batch once it reaches 16 KB ...
        properties.setProperty(ProducerConfig.BATCH_SIZE_CONFIG, "16384");
        // ... or after lingering 1 ms, whichever comes first.
        properties.setProperty(ProducerConfig.LINGER_MS_CONFIG, "1");
        // 32 MB of buffer memory for records awaiting transmission.
        properties.setProperty(ProducerConfig.BUFFER_MEMORY_CONFIG, "33554432");
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        return properties;
    }

    /**
     * Send-completion callback: prints the record's partition/offset on success,
     * or the failure on stderr.
     *
     * <p>Fix over the original: the exception parameter was ignored, so any failed
     * send dereferenced a null {@code metadata} and threw an NPE inside the callback.
     */
    private static void logSendResult(RecordMetadata metadata, Exception exception) {
        if (exception != null) {
            System.err.println("send failed: " + exception);
        } else {
            System.out.println("partition: " + metadata.partition() + " , offset: " + metadata.offset());
        }
    }

    /**
     * Async send with a completion callback and partition load-balancing via a
     * custom partitioner class.
     */
    public static void callbackSendAndPartition() throws ExecutionException, InterruptedException {
        Properties properties = baseProperties();
        // Custom partitioner implementation.
        // NOTE(review): this class name does not match this file's package
        // (com.ln.kafka.v2_4_0.producer) — verify the partitioner actually lives at
        // com.lslx.learn.kafka.producer.SimplePartition, otherwise the producer
        // will fail to start with a ConfigException.
        properties.setProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG, "com.lslx.learn.kafka.producer.SimplePartition");

        // try-with-resources guarantees the producer is closed (and buffered
        // records flushed) even if sending throws.
        try (Producer<String, String> producer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME, "key-" + i, "value-" + i);
                producer.send(record, ProducerSimple::logSendResult);
            }
        }
    }

    /**
     * Async send with a completion callback.
     */
    public static void callbackSend() throws ExecutionException, InterruptedException {
        try (Producer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME, "key-" + i, "value-" + i);
                producer.send(record, ProducerSimple::logSendResult);
            }
        }
    }

    /**
     * Synchronous send — really an async send immediately blocked on with
     * {@link Future#get()}, which waits for the broker acknowledgement.
     *
     * @throws ExecutionException   if the broker rejects or fails a send
     * @throws InterruptedException if interrupted while waiting for the ack
     */
    public static void syncSend() throws ExecutionException, InterruptedException {
        try (Producer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME, "key-" + i, "value-" + i);
                Future<RecordMetadata> send = producer.send(record);
                // Blocks until the record is acknowledged (or the send fails).
                RecordMetadata recordMetadata = send.get();
                System.out.println("partition: " + recordMetadata.partition() + " , offset: " + recordMetadata.offset());
            }
        }
    }

    /**
     * Fire-and-forget async send: no callback, no blocking — failed sends are
     * silent (acceptable only when occasional loss does not matter).
     */
    public static void asyncSend() {
        try (Producer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record = new ProducerRecord<>(TOPIC_NAME, "key-" + i, "value-" + i);
                producer.send(record);
            }
        }
    }

}
