package team.bluepen.supermarket.service;

import org.apache.kafka.clients.admin.Admin;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.TopicExistsException;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.StringSerializer;
import team.bluepen.supermarket.SupermarketStatSystemApplication;
import team.bluepen.supermarket.conf.KafkaProperties;
import team.bluepen.supermarket.data.entity.Product;
import team.bluepen.supermarket.util.RandomProductGenerator;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

/**
 * Generates random {@link Product} records and publishes their serialized form
 * to a Kafka topic, handing each pushed batch to a caller-supplied callback.
 *
 * <p>Lifecycle: construct, then {@link #start()}, then {@link #stop()}.
 * {@code start}/{@code stop} are expected to be called from a single
 * controlling thread; the class is not otherwise documented as thread-safe.
 *
 * @author RollW
 */
public class ProductPusher {
    // StringSerializer is stateless, so one shared instance is safe even though
    // KafkaProducer#close() also closes its serializers (a no-op here).
    private static final Serializer<String> STRING_SERIALIZER = new StringSerializer();

    private final RandomProductGenerator generator;
    private final Producer<String, String> producer;
    private final String topic;
    private final Consumer<List<Product>> onPush;
    private final ScheduledExecutorService scheduledExecutorService =
            Executors.newSingleThreadScheduledExecutor();

    /**
     * Ensures the target topic exists, opens the producer and prepares the
     * random-product generator.
     *
     * @param path resource path of the sample data fed to the generator
     * @param kafkaProperties Kafka connection settings; only the bootstrap
     *                        servers are read here
     * @param topic name of the topic the products are published to
     * @param onPush callback invoked with each batch after the producer flush
     * @throws IOException if the resource at {@code path} cannot be loaded
     * @throws IllegalStateException if topic creation fails or is interrupted
     */
    public ProductPusher(String path,
                         KafkaProperties kafkaProperties,
                         String topic,
                         Consumer<List<Product>> onPush) throws IOException {
        this.topic = Objects.requireNonNull(topic, "topic");
        this.onPush = Objects.requireNonNull(onPush, "onPush");
        Properties properties = new Properties();
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());

        createTopic(properties, topic);
        producer = new KafkaProducer<>(properties, STRING_SERIALIZER, STRING_SERIALIZER);
        generator = new RandomProductGenerator(
                SupermarketStatSystemApplication.loadResource(path)
        );
    }

    /**
     * Creates the topic (1 partition, replication factor 1) and waits for the
     * broker to acknowledge it. An already-existing topic is not an error.
     */
    private void createTopic(Properties properties, String topic) {
        // try-with-resources: the original leaked the admin client whenever
        // createTopics threw before close() was reached.
        try (Admin adminClient = KafkaAdminClient.create(properties)) {
            // Await the result: without get() the request was fire-and-forget,
            // so the producer could race a not-yet-created topic and real
            // failures were silently dropped.
            adminClient.createTopics(
                            Collections.singletonList(new NewTopic(topic, 1, (short) 1)))
                    .all().get();
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while creating topic " + topic, e);
        } catch (ExecutionException e) {
            // Re-running against an existing topic is expected and harmless.
            if (!(e.getCause() instanceof TopicExistsException)) {
                throw new IllegalStateException("Failed to create topic " + topic, e);
            }
        }
    }

    /**
     * Schedules one batch of 100 products to be pushed after a 5 second delay.
     *
     * <p>NOTE(review): {@code schedule} fires exactly once; if continuous
     * pushing is intended this should be {@code scheduleAtFixedRate} — confirm
     * with the callers before changing.
     */
    public void start() {
        scheduledExecutorService.schedule(
                this::push100,
                5, TimeUnit.SECONDS
        );
    }

    /**
     * Shuts down the scheduler, waits briefly for an in-flight push to finish,
     * then closes the producer.
     */
    public void stop() {
        scheduledExecutorService.shutdown();
        try {
            // The original closed the producer immediately, which could close
            // it underneath a still-running push task and fail its send()s.
            if (!scheduledExecutorService.awaitTermination(10, TimeUnit.SECONDS)) {
                scheduledExecutorService.shutdownNow();
            }
        } catch (InterruptedException e) {
            scheduledExecutorService.shutdownNow();
            Thread.currentThread().interrupt();
        }
        producer.close();
    }

    /** Generates one random product, sends its serialized form, returns it. */
    private Product pushProductMessage() {
        Product product = generator.next();
        final String serialized = product.serialize();
        ProducerRecord<String, String> record = new ProducerRecord<>(
                topic,
                serialized);
        producer.send(record);
        return product;
    }

    /** Pushes a small batch of 5 products. Currently unreferenced; kept for manual use. */
    private void push5() {
        push(5);
    }

    /** Pushes a batch of 100 products; scheduled by {@link #start()}. */
    private void push100() {
        push(100);
    }

    /**
     * Pushes {@code number} products, flushes the producer so the records are
     * actually sent, then hands the batch to the {@code onPush} callback.
     */
    private void push(int number) {
        List<Product> products = new ArrayList<>(number);
        for (int i = 0; i < number; i++) {
            products.add(pushProductMessage());
        }
        producer.flush();
        onPush.accept(products);
    }

    /** Returns the Kafka topic this pusher publishes to. */
    public String getTopic() {
        return topic;
    }
}
