package com.xkcoding.mq.kafka;

import com.xkcoding.mq.kafka.config.MyPartitioner;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.Test;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * Kafka producer demos: asynchronous and synchronous sends, send callbacks,
 * custom partitioning, throughput tuning, acks/retries, transactional sends,
 * and sending to an explicit partition.
 *
 * @author Administrator
 * @date 2022/4/17 16:30
 **/
public class KafkaDemo {

    /** Address of the Kafka broker used by every demo. */
    public static final String BOOTSTRAP_SERVERS = "localhost:9092";

    /**
     * Builds the producer configuration shared by all demos:
     * broker address plus String key/value serializers.
     *
     * @return a fresh, mutable {@link Properties} the caller may extend
     */
    private static Properties baseProperties() {
        Properties properties = new Properties();
        //server
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS);
        //序列化 (key/value serializers)
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return properties;
    }

    /**
     * Send callback that prints the destination topic/partition on success
     * and, unlike the original inline callbacks, surfaces the failure cause
     * instead of silently swallowing it.
     *
     * @return a callback suitable for {@code producer.send(record, callback)}
     */
    private static Callback loggingCallback() {
        return new Callback() {
            @Override
            public void onCompletion(RecordMetadata metadata, Exception e) {
                if (e == null) {
                    System.out.println("topic : " + metadata.topic() + " partition : " + metadata.partition() + " 发送成功!");
                } else {
                    // do not swallow send failures — report them
                    e.printStackTrace();
                }
            }
        };
    }

    /**
     * 异步发送 — fire-and-forget asynchronous send, no callback.
     */
    @Test
    public void test_async_send() {
        // try-with-resources guarantees the producer is closed even if send throws
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 5; i++) {
                producer.send(new ProducerRecord<>("first", "hello我的爱: " + i));
            }
        }
    }

    /**
     * 异步发送回调 — asynchronous send with a completion callback.
     */
    @Test
    public void test_async_send_callback() {
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 5; i++) {
                producer.send(new ProducerRecord<>("first", "hello你的爱: " + i), loggingCallback());
            }
        }
    }

    /**
     * 同步 — synchronous send: blocks on each {@code Future.get()} until the
     * broker acknowledges the record.
     *
     * @throws ExecutionException   if a send fails
     * @throws InterruptedException if the wait is interrupted
     */
    @Test
    public void test_sync_send() throws ExecutionException, InterruptedException {
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 5; i++) {
                // blocking on the returned Future makes the send synchronous
                producer.send(new ProducerRecord<>("first", "hello你的爱: " + i)).get();
            }
        }
    }

    /**
     * 自定义分区管理 — routes records through {@link MyPartitioner}.
     */
    @Test
    public void test_async_partitioner() {
        Properties properties = baseProperties();
        //分区 (custom partitioner decides the target partition)
        properties.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, MyPartitioner.class);
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 5; i++) {
                //通过value值达到不同的partition (partition chosen from the record value)
                producer.send(new ProducerRecord<>("first", "helllo你的爱: " + i), loggingCallback());
            }
        }
    }

    /**
     * 配置参数提高producer吞吐量 — tuning parameters that raise producer
     * throughput: buffer memory, linger, compression and batch size.
     */
    @Test
    public void test_async_param() {
        Properties properties = baseProperties();
        //缓冲区大小 (32 MB record accumulator)
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        //linger.ms (参考5-100ms) — small delay lets batches fill up
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 2);
        //压缩 (compress batches before sending)
        properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "snappy");
        //批次大小 (16 KB per batch)
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 5; i++) {
                producer.send(new ProducerRecord<>("first", "hello你的爱: " + i), loggingCallback());
            }
        }
    }

    /**
     * acks/retries 配置 — leader-only acknowledgement ({@code acks=1})
     * with up to 3 retries on transient failures.
     */
    @Test
    public void test_async_send_acks() {
        Properties properties = baseProperties();
        //acks=1: leader ack only, no follower replication wait
        properties.put(ProducerConfig.ACKS_CONFIG, "1");
        //retries on retriable errors
        properties.put(ProducerConfig.RETRIES_CONFIG, 3);
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            for (int i = 0; i < 500; i++) {
                producer.send(new ProducerRecord<>("first", "hello你的爱: " + i), loggingCallback());
            }
        }
    }

    /**
     * 事务消息 — transactional send: all 5000 records commit atomically or
     * are rolled back together on failure.
     */
    @Test
    public void test_async_send_transaction() {
        Properties properties = baseProperties();
        //开启幂等 (idempotence is mandatory for transactions)
        properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
        //事务配置 (stable id identifies this producer across restarts)
        properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "my_transactional_id_001");
        // try-with-resources closes the producer on every path, including
        // failures in initTransactions/beginTransaction that the original leaked
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
            //初始化事务(获取producer id)
            producer.initTransactions();
            //开始
            producer.beginTransaction();
            try {
                for (int i = 0; i < 5000; i++) {
                    producer.send(new ProducerRecord<>("first", "hello你的爱: " + i), loggingCallback());
                }
                //事务提交
                producer.commitTransaction();
            } catch (Exception e) {
                e.printStackTrace();
                //消息异常回退 (roll back every record in the failed transaction)
                producer.abortTransaction();
            }
        }
    }

    /**
     * 发送数据到指定分区 — sends every record to partition 0 explicitly
     * (empty string key; the explicit partition overrides key hashing).
     */
    @Test
    public void test_async_send_partition() {
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(baseProperties())) {
            for (int i = 0; i < 5; i++) {
                producer.send(new ProducerRecord<>("first", 0, "", "hello你的爱: " + i), loggingCallback());
            }
        }
    }
}
