package com.zws.cucumber.glue;

import com.zws.cucumber.expander.ZwsStr;
import com.zws.cucumber.util.colorfullog.ZwsColorfulLogger;
import com.zws.cucumber.var.ZwsVar;
import io.cucumber.datatable.DataTable;
import io.cucumber.java.After;
import io.cucumber.java.Before;
import io.cucumber.java.en.Given;
import io.cucumber.java.en.Then;
import io.cucumber.java.en.When;
import net.javacrumbs.jsonunit.JsonAssert;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.jupiter.api.Assertions;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConsumerAwareMessageListener;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.utils.ContainerTestUtils;
import org.springframework.kafka.test.utils.KafkaTestUtils;

import javax.annotation.PostConstruct;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

import static com.zws.cucumber.expander.ZwsStr.expand;
import static net.javacrumbs.jsonunit.JsonAssert.when;
import static net.javacrumbs.jsonunit.core.Option.IGNORING_ARRAY_ORDER;
import static org.junit.jupiter.api.Assertions.assertEquals;

/**
 * author: zws
 */
public class ZwsKafkaStepDef {

    private static final Logger logger = ZwsColorfulLogger.of(ZwsKafkaStepDef.class);

    // Poll budget for an expected message: POLL_ATTEMPTS * POLL_INTERVAL_MS ≈ 2.5 s total.
    private static final int POLL_ATTEMPTS = 250;
    private static final long POLL_INTERVAL_MS = 10;

    // Optional: when no embedded broker is configured, the Kafka steps fail fast
    // (or are skipped in the hooks) instead of breaking unrelated scenarios.
    @Autowired(required = false)
    private EmbeddedKafkaBroker embeddedKafkaBroker;

    private Map<String, Object> configs;
    private DefaultKafkaConsumerFactory<String, String> factory;
    private ConsumerAwareMessageListener<String, String> listener;
    private KafkaMessageListenerContainer<String, String> container;
    private Map<String, Object> producerConfigs;
    private KafkaProducer<String, Object> producer;

    // Records received so far, grouped by topic. Filled by the listener thread,
    // drained by the verify steps, hence the concurrent map + blocking queues.
    private final Map<String, BlockingQueue<ConsumerRecord<String, String>>> topic2Records = new ConcurrentHashMap<>();

    /**
     * Builds the consumer factory, the record-capturing listener and the producer.
     * No-op when no embedded broker is available in the Spring context.
     */
    @PostConstruct
    public void init() {
        if (embeddedKafkaBroker == null) {
            return;
        }
        configs = new HashMap<>(
                KafkaTestUtils.consumerProps("kafka-cucumber-ct-group", "true", embeddedKafkaBroker));
        factory = new DefaultKafkaConsumerFactory<>(configs, new StringDeserializer(), new StringDeserializer());
        // Capture every record into the per-topic queue and commit immediately so
        // re-subscribing in a later scenario does not replay old messages.
        listener = (r, c) -> {
            String topic = r.topic();
            BlockingQueue<ConsumerRecord<String, String>> queue =
                    topic2Records.computeIfAbsent(topic, t -> new LinkedBlockingQueue<>());
            queue.add(r);
            c.commitSync();
            logger.debug("receive kafka message, topic: <y>{}</y>, key: <y>{}</y>, record: {}",
                    r.topic(), r.key(), r);
        };
        producerConfigs = KafkaTestUtils.producerProps(embeddedKafkaBroker);
        producerConfigs.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerConfigs.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        producerConfigs.put("linger.ms", 1000); // 1 second
        producerConfigs.put("batch.size", 1024 * 1024); // 1 M
        producer = new KafkaProducer<>(producerConfigs);
    }

    /**
     * Scenario hook: stops any running listener container and clears captured
     * records so scenarios tagged for Kafka start from a clean slate.
     */
    @Before("@KafkaTestBefore")
    @After("@KafkaTestAfter")
    public void setUpAndTearDown() {
        if (embeddedKafkaBroker != null) {
            stopContainerIfRunning();
            logger.debug("clear kafka records");
            topic2Records.clear();
        }
    }

    /**
     * Step: shuts down the whole Kafka test fixture — listener container,
     * producer and the embedded broker itself — and clears captured records.
     */
    @Given("close kafka container")
    public void stopKafkaContainer() {
        if (embeddedKafkaBroker != null) {
            stopContainerIfRunning();
            if (producer != null) { // producer only exists after init() ran against a live broker
                producer.close();
            }
            embeddedKafkaBroker.destroy();
            logger.debug("clear kafka records");
            topic2Records.clear();
        }
    }

    // Stops and discards the listener container when one is active.
    private void stopContainerIfRunning() {
        if (container != null) {
            logger.debug("stop kafka container");
            container.stop();
            container = null;
        }
    }

    /**
     * Step: subscribes a listener container to the topics listed in the data table
     * (one topic per row; each cell is variable-expanded) and blocks until the
     * container has been assigned all partitions.
     *
     * @throws IllegalStateException when the broker is absent or a container is already running
     */
    @Given("prepare kafka and subscribe topic")
    public void prepareKafkaAndSubscribeTopic(DataTable dataTable) {
        if (embeddedKafkaBroker == null) {
            logger.error("kafka is not started");
            throw new IllegalStateException();
        }
        if (container != null) {
            logger.error("kafka container is already prepared");
            throw new IllegalStateException();
        }
        String[] topics = dataTable.asList()
                .stream()
                .map(ZwsStr::expand)
                .toArray(String[]::new);

        Stream.of(topics)
                .forEach(t -> topic2Records.putIfAbsent(t, new LinkedBlockingQueue<>()));
        ContainerProperties properties = new ContainerProperties(topics);
        container = new KafkaMessageListenerContainer<>(factory, properties);
        container.setupMessageListener(listener);
        container.start();
        logger.debug("embeddedKafkaBroker.getPartitionsPerTopic(): {}", embeddedKafkaBroker.getPartitionsPerTopic());
        // Without waiting for assignment, messages sent right after this step could be lost.
        ContainerTestUtils.waitForAssignment(container, embeddedKafkaBroker.getPartitionsPerTopic());
    }

    /**
     * Step: polls for a record with the given (expanded) key on the topic and
     * compares its value as JSON, ignoring array ordering. The matched value is
     * stored in {@code __kafka_message__} for later steps.
     */
    @Then("verify kafka message from topic {string} with key {string}")
    public void verifyKafkaMessageFromTopicWithKey(String topic, String expectedKey, String expectedMessage) throws Exception {
        String expandedExpectedKey = expand(expectedKey);
        ConsumerRecord<String, String> record = pollRecordFromTopicWithKey(topic, expandedExpectedKey);
        Assertions.assertNotNull(record, "Did not receive kafka message from topic " + topic);
        assertEquals(expandedExpectedKey, record.key());
        JsonAssert.assertJsonEquals(expand(expectedMessage), record.value(), when(IGNORING_ARRAY_ORDER));
        ZwsVar.put("__kafka_message__", record.value());
    }

    /**
     * Step: like the JSON variant above, but compares the record value as an
     * exact plain-text string.
     */
    @Then("verify kafka message of plaintext from topic {string} with key {string}")
    public void verifyKafkaMessageOfPlaintextFromTopicWithKey(String topic, String expectedKey, String expectedMessage) throws Exception {
        String expandedExpectedKey = expand(expectedKey);
        ConsumerRecord<String, String> record = pollRecordFromTopicWithKey(topic, expandedExpectedKey);
        Assertions.assertNotNull(record, "Did not receive kafka message from topic " + topic);
        assertEquals(expandedExpectedKey, record.key());
        assertEquals(expand(expectedMessage), record.value());
        ZwsVar.put("__kafka_message__", record.value());
    }

    /**
     * Polls the captured records of {@code topic} for the first record whose key
     * equals {@code key}, retrying for up to {@code POLL_ATTEMPTS * POLL_INTERVAL_MS} ms.
     * The matched record is removed from the queue.
     *
     * @return the matching record, or {@code null} when none arrived in time
     * @throws InterruptedException when interrupted while waiting between polls
     */
    private ConsumerRecord<String, String> pollRecordFromTopicWithKey(String topic, String key)
            throws InterruptedException {
        // computeIfAbsent instead of get: a verify step on a never-subscribed topic
        // must time out and fail the assertion, not throw an NPE here.
        BlockingQueue<ConsumerRecord<String, String>> records =
                topic2Records.computeIfAbsent(topic, t -> new LinkedBlockingQueue<>());
        for (int i = 0; i < POLL_ATTEMPTS; i++) {
            ConsumerRecord<String, String> record = records.stream()
                    .filter(r -> Objects.equals(key, r.key()))
                    .findFirst()
                    .orElse(null);
            if (record != null) {
                records.remove(record);
                return record;
            }
            TimeUnit.MILLISECONDS.sleep(POLL_INTERVAL_MS);
        }
        return null;
    }

    /**
     * Step: sends a single message (topic, key and value are all variable-expanded)
     * and flushes so it is visible to consumers immediately despite the linger config.
     */
    @When("send kafka message to topic {string} with key {string}")
    public void sendMessageToTopic(String topic, String key, String message) {
        producer.send(new ProducerRecord<>(expand(topic), expand(key), expand(message)));
        producer.flush();
    }

    /**
     * Step: sends one message per data-table row ({@code | key | value |}) to the
     * topic, expanding every cell, then flushes.
     */
    @When("send kafka message to topic {string}")
    public void sendMessageToTopic(String topic, DataTable dataTable) {
        topic = expand(topic);
        for (List<String> row : dataTable.cells()) {
            // topic was already expanded above — do not expand it a second time
            producer.send(new ProducerRecord<>(topic, expand(row.get(0)), expand(row.get(1))));
        }
        producer.flush();
    }
}
