package com.example.dockercompose.dao;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;

/**
 * Smoke test for a Kafka broker: publishes a single record to {@value #TOPIC}
 * and polls until it is consumed back, failing fast if the round trip
 * does not produce the expected value.
 */
public class Kafka {
    // SLF4J convention: one static final logger per class.
    private static final Logger logger = LoggerFactory.getLogger(Kafka.class);

    private static final String BOOT_STRAP_SERVERS = "kafka:9092";
    private static final String TOPIC = "test";

    /**
     * Sends one ("key", "test") record and then consumes from {@value #TOPIC}
     * until at least one record arrives, verifying the payload round-trips.
     *
     * @throws IllegalStateException if the first consumed value is not "test"
     */
    public void pubSub() {
        // try-with-resources: producer must be closed or the buffered,
        // asynchronously-sent record may never reach the broker.
        try (KafkaProducer<String, String> producer = createKafkaProducer()) {
            producer.send(new ProducerRecord<>(TOPIC, "key", "test"), (metadata, exception) -> {
                if (exception != null) {
                    logger.error("kafka send message error.", exception);
                } else {
                    logger.info("send message ok");
                }
            });
            // Force delivery before we start consuming.
            producer.flush();
        }

        List<String> list = new ArrayList<>();
        try (KafkaConsumer<String, String> consumer = createKafkaConsumer()) {
            consumer.subscribe(Arrays.asList(TOPIC));

            while (list.isEmpty()) {
                // Short poll so the loop re-checks frequently; the original
                // Duration.ofSeconds(1000) blocked a single poll for ~17 minutes.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    logger.info(record.key());
                    logger.info(record.value());
                    list.add(record.value());
                    logger.info("kafka subscribe ok.");
                    break; // only the first record matters for this check
                }
            }
            // Auto-commit is disabled, so commit explicitly to avoid redelivery.
            consumer.commitSync();
        }

        // A bare `assert` is disabled unless the JVM runs with -ea; fail explicitly.
        if (!"test".equals(list.get(0))) {
            throw new IllegalStateException("unexpected kafka message: " + list.get(0));
        }
    }

    /**
     * Builds a String/String consumer in group "test-group" reading from the
     * earliest offset, with auto-commit disabled (caller commits explicitly).
     */
    private static KafkaConsumer<String, String> createKafkaConsumer() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOT_STRAP_SERVERS);
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // Effectively never evict this consumer from the group for slow polling.
        properties.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, String.valueOf(Integer.MAX_VALUE));
        properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "1000");
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        return new KafkaConsumer<>(properties);
    }

    /** Builds a String/String producer pointed at {@value #BOOT_STRAP_SERVERS}. */
    private static KafkaProducer<String, String> createKafkaProducer() {
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOT_STRAP_SERVERS);
        properties.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        properties.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        return new KafkaProducer<>(properties);
    }
}
