package kafka.demo;

import com.google.common.collect.Lists;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.errors.TopicExistsException;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

/**
 * @author jiangwz
 * @create 2022/3/9.
 */
public class ProducerDemo {

    private KafkaProducer<String, byte[]> producer;

    private AdminClient client;

    /**
     * Initializes the producer against a local broker with String keys and
     * byte[] values. Must be called before {@link #getProducer()}.
     */
    public void producerInit() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.RETRIES_CONFIG, 1);
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
        props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "none");
        props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, 1048576);
        // NOTE(review): retries > 0 combined with max.in.flight = 5 and idempotence
        // disabled can reorder records on retry; lower to 1 or enable
        // enable.idempotence if per-partition ordering matters.
        props.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, 5);
        producer = new KafkaProducer<>(props);
    }

    /**
     * Returns the producer created by {@link #producerInit()}, or {@code null}
     * if it has not been initialized yet.
     */
    public KafkaProducer<String, byte[]> getProducer() {
        return producer;
    }

    /**
     * Initializes the admin client against a local broker. Must be called
     * before any topic-management method.
     */
    public void adminInit() {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(AdminClientConfig.RETRIES_CONFIG, 1);
        client = AdminClient.create(props);
    }

    /**
     * Creates a single-partition, replication-factor-1 topic, tolerating the
     * case where it already exists.
     *
     * @param topic topic name to create
     * @throws RuntimeException if creation fails for any reason other than the
     *         topic already existing, or if the wait is interrupted
     */
    public void createTopicIfNotExists(String topic) {
        Map<String, String> topicConfigs = new HashMap<>();
        NewTopic newTopic = new NewTopic(topic, 1, (short) 1).configs(topicConfigs);
        try {
            createTopic(newTopic).values().get(topic).get();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            throw new RuntimeException("interrupted while creating topic " + topic, e);
        } catch (ExecutionException e) {
            if (e.getCause() instanceof TopicExistsException) {
                // Expected when the topic is already present — nothing to do.
                System.out.println("topic already exists: " + topic);
            } else {
                // Any other failure must not be silently swallowed.
                throw new RuntimeException("failed to create topic " + topic, e);
            }
        }
    }

    /**
     * Issues an async create-topics request for the given topic.
     *
     * @param topic topic definition to create
     * @return the broker's async result; call {@code .values().get(name).get()} to wait
     */
    public CreateTopicsResult createTopic(NewTopic topic) {
        return client.createTopics(Collections.singletonList(topic));
    }

    /** Closes the admin client if it was initialized. Safe to call repeatedly. */
    public void closeAdmin() {
        if (client != null) {
            client.close();
        }
    }

    /**
     * Encodes a message as UTF-8 bytes.
     *
     * @param msg text to encode; must not be null
     * @return the UTF-8 byte representation
     */
    public static byte[] strToBytes(String msg) {
        // StandardCharsets.UTF_8 avoids the charset-name lookup of Charset.forName.
        return msg.getBytes(StandardCharsets.UTF_8);
    }

    /**
     * Demo entry point: ensures the topic exists, sends 100 records with a
     * header, then flushes and closes both clients.
     */
    public static void main(String[] args) {
        ProducerDemo demo = new ProducerDemo();
        demo.producerInit();
        demo.adminInit();
        demo.createTopicIfNotExists("test01");

        KafkaProducer<String, byte[]> kafkaProducer = demo.getProducer();
        try {
            for (int i = 0; i < 100; i++) {
                String msg = "hello" + i;
                Header header = new RecordHeader("requestTopic", strToBytes("just"));
                // partition and key are null: the broker assigns a partition round-robin.
                ProducerRecord<String, byte[]> record = new ProducerRecord<>("test01",
                        null,
                        null,
                        strToBytes(msg),
                        Collections.singletonList(header)
                );

                kafkaProducer.send(record, (recordMetadata, e) -> {
                    if (e == null) {
                        System.out.println("callback=" + recordMetadata);
                    } else {
                        e.printStackTrace();
                    }
                });
            }
        } finally {
            // flush() + close() wait for all in-flight sends to complete, which
            // replaces the original fixed 20-second sleep and also releases the
            // producer's network resources.
            kafkaProducer.flush();
            kafkaProducer.close();
            demo.closeAdmin();
        }
    }

}
