package phase04.module02.code.helloworld;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * @author Alex Zhang
 */
/**
 * Demonstrates synchronous and asynchronous record production with the
 * Kafka producer API ("hello world" examples against a local broker).
 *
 * @author Alex Zhang
 */
public class KafkaProducerTest {

    /** Shared producer: created once before all tests, closed once after. */
    private static KafkaProducer<Integer, String> producer;

    @BeforeAll
    public static void init() {
        // Capacity 8: a HashMap sized 5 would resize at threshold 3 (load factor 0.75).
        Map<String, Object> config = new HashMap<>(8);
        config.put("bootstrap.servers", "node1:9092");
        config.put("key.serializer", IntegerSerializer.class);
        config.put("value.serializer", StringSerializer.class);
        // FIX: the property key is "acks" — the original "aks" typo meant the
        // setting was silently ignored and the client default applied instead.
        config.put("acks", "all");
        config.put("retries", "3");
        producer = new KafkaProducer<>(config);
    }

    @AfterAll
    public static void tearDown() {
        // FIX: close once after ALL tests. Previously each test closed the
        // shared static producer, so whichever test ran second failed on a
        // closed producer.
        if (null != producer) {
            producer.close();
        }
    }

    /**
     * Sends one record and blocks on the returned {@link Future} until the
     * broker acknowledges it, then prints the resulting metadata.
     */
    @Test
    public void syncProduce() {
        List<Header> headers = new ArrayList<>();
        // FIX: specify the charset explicitly instead of relying on the
        // platform default encoding.
        headers.add(new RecordHeader("biz.name", "kafkademo".getBytes(StandardCharsets.UTF_8)));
        ProducerRecord<Integer, String> record =
                new ProducerRecord<>("topic_1", 0, 0, "first log", headers);
        final Future<RecordMetadata> future = producer.send(record);
        RecordMetadata metadata = null;
        try {
            metadata = future.get();
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
        if (null != metadata) {
            printMetadata(metadata);
        }
    }

    /**
     * Sends one record asynchronously; the callback prints the metadata on
     * success, or the failure message otherwise.
     */
    @Test
    public void asyncProduce() {
        List<Header> headers = new ArrayList<>();
        headers.add(new RecordHeader("biz.name", "kafkademo".getBytes(StandardCharsets.UTF_8)));

        ProducerRecord<Integer, String> record =
                new ProducerRecord<>("topic_1", 0, 0, "2 log", headers);
        producer.send(record, (metadata, exception) -> {
            if (null == exception) {
                printMetadata(metadata);
            } else {
                System.out.println(exception.getMessage());
            }
        });
        // FIX: flush so the async send is delivered before the test returns,
        // without destroying the shared producer for other tests.
        producer.flush();
    }

    /** Prints topic, partition, and offset of an acknowledged record. */
    private static void printMetadata(RecordMetadata metadata) {
        System.out.println(metadata.topic());
        System.out.println(metadata.partition());
        System.out.println(metadata.offset());
    }

}
