package com.panlm.kafka.producer;

import org.apache.kafka.clients.producer.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * JUnit 4 tests demonstrating the three Kafka producer send styles:
 * fire-and-forget, asynchronous with callback, and synchronous (blocking on the Future).
 *
 * <p>NOTE(review): these tests require a live Kafka cluster at {@link #KAFKA_CLUSTER}
 * and a topic named "bigdata" — they are integration demos, not unit tests.
 */
public class CustomProducer {

    /** Comma-separated bootstrap broker list for the Kafka cluster. */
    public static final String KAFKA_CLUSTER = "hadoop102:9092,hadoop103:9092,hadoop104:9092";

    private KafkaProducer<String, String> producer;

    /**
     * Creates a String/String producer before each test.
     */
    @Before
    public void before() {
        Properties properties = new Properties();
        // Kafka cluster broker list
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA_CLUSTER);
        // ack policy: wait for all in-sync replicas to acknowledge each record
        properties.put(ProducerConfig.ACKS_CONFIG, "all");
        // number of retries on transient send failures
        properties.put(ProducerConfig.RETRIES_CONFIG, 1);
        // batch size in bytes
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        // linger time in ms before a partially-filled batch is sent
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        // RecordAccumulator buffer size in bytes
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);

        // key and value serializers
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        producer = new KafkaProducer<>(properties);
    }

    /**
     * Closes the producer after each test; close() also flushes any buffered records,
     * which is what makes the fire-and-forget test below actually deliver its messages.
     */
    @After
    public void after() {
        producer.close();
    }

    // Fire-and-forget API: no callback, delivery result is never observed here.
    @Test
    public void testSend() {
        for (int i = 0; i < 10; i++) {
            // every record must be wrapped in a ProducerRecord (topic, key, value)
            producer.send(new ProducerRecord<>("bigdata", Integer.toString(i), Integer.toString(i)));
        }
    }

    // Asynchronous API with a callback.
    @Test
    public void testSendCallback() {
        for (int i = 0; i < 10; i++) {
            producer.send(new ProducerRecord<>("bigdata", Integer.toString(i), Integer.toString(i)), new Callback() {
                // Invoked asynchronously once the producer receives the broker's ack.
                @Override
                public void onCompletion(RecordMetadata metadata, Exception e) {
                    if (e == null) {
                        System.out.println("success->" + metadata.offset());
                    } else {
                        e.printStackTrace();
                    }
                }
            });
        }
    }

    // Synchronous send API: block on the Future returned by send().
    @Test
    public void testSyncSend() {
        for (int i = 0; i < 100; i++) {
            try {
                producer.send(new ProducerRecord<>("bigdata", Integer.toString(i), Integer.toString(i))).get();
            } catch (InterruptedException e) {
                // restore the interrupt flag instead of silently swallowing it
                Thread.currentThread().interrupt();
                e.printStackTrace();
            } catch (ExecutionException e) {
                // the send itself failed; its cause carries the broker-side error
                e.printStackTrace();
            }
        }
    }
}
