package com.syher.seconds.kill.kafka.core.kafka;

import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import joptsimple.internal.Strings;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;

/**
 * Lazily-initialized singleton that spins up a pool of Kafka {@link ConsumerTask}s,
 * each driven by a {@link ScheduledExecutorService} and each owning its own
 * {@link KafkaConsumer}.
 *
 * <p>Usage: {@code ConsumerGroup.builder().configure(threads, group, topic, brokers).execute();}
 */
public class ConsumerGroup {
    private static final Logger LOGGER = LoggerFactory.getLogger(ConsumerGroup.class);

    private ScheduledExecutorService executorService;
    private List<ConsumerTask> tasks;

    /** Initialization-on-demand holder: the JVM guarantees thread-safe lazy creation. */
    private static class SINGLETON {
        private static final ConsumerGroup group = new ConsumerGroup();

        public static ConsumerGroup singleton() {
            return group;
        }
    }

    /**
     * Returns the shared {@code ConsumerGroup} instance.
     * (Name kept as {@code builder()} for source compatibility; this is a singleton
     * accessor, not a builder.)
     */
    public static ConsumerGroup builder() {
        return SINGLETON.singleton();
    }

    /**
     * Validates arguments, then creates {@code threadNum} consumer tasks plus the
     * scheduler that will drive them.
     *
     * <p>FIX: the original shared one {@link KafkaConsumer} across every task, but
     * {@code KafkaConsumer} is NOT thread-safe — concurrent access from multiple pool
     * threads throws {@code ConcurrentModificationException}. Each task now owns a
     * dedicated consumer. Validation also runs before any consumer is created, so a
     * bad {@code threadNum} no longer leaks an open connection.
     *
     * @param threadNum  number of consumer threads; must be non-null and positive
     * @param groupId    Kafka consumer group id; must be non-empty
     * @param topic      topic to subscribe to; must be non-empty
     * @param brokerList bootstrap servers (e.g. {@code host1:9092,host2:9092}); must be non-empty
     * @return this group, for call chaining
     * @throws IllegalArgumentException if any argument is missing or invalid
     */
    public ConsumerGroup configure(Integer threadNum, String groupId, String topic, String brokerList) {
        // Validate everything up front, before any network resources are opened.
        Preconditions.checkArgument(threadNum != null && threadNum > 0, "threadNum must be specified and positive.");
        Preconditions.checkArgument(!Strings.isNullOrEmpty(groupId), "groupId must be specified.");
        Preconditions.checkArgument(!Strings.isNullOrEmpty(topic), "topic must be specified.");
        Preconditions.checkArgument(!Strings.isNullOrEmpty(brokerList), "brokerList must be specified.");

        this.executorService = Executors.newScheduledThreadPool(threadNum);

        tasks = Lists.newArrayListWithExpectedSize(threadNum);
        for (int i = 0; i < threadNum; i++) {
            // One consumer per task: a KafkaConsumer may only ever be used by a single thread.
            tasks.add(new ConsumerTask(createConsumer(groupId, topic, brokerList)));
        }
        return this;
    }

    /**
     * Builds a new consumer subscribed to {@code topic}: string key/value
     * deserialization, auto-commit every second, 30s session timeout.
     */
    private KafkaConsumer<String, String> createConsumer(String groupId, String topic, String brokerList) {
        Properties props = new Properties();
        props.put("bootstrap.servers", brokerList);
        props.put("group.id", groupId);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(topic));
        return consumer;
    }

    /**
     * Starts all tasks; each run is re-scheduled 1ms after the previous run completes.
     *
     * @throws IllegalStateException if {@link #configure} has not been called first
     */
    public void execute() {
        Preconditions.checkState(executorService != null && tasks != null,
                "configure(...) must be called before execute().");
        tasks.forEach(task -> executorService.scheduleWithFixedDelay(task, 0, 1, TimeUnit.MILLISECONDS));
    }

    /**
     * Stops scheduling further task runs; in-flight runs finish normally.
     * NOTE(review): consumers are owned by their tasks and are not closed here —
     * confirm ConsumerTask handles consumer cleanup.
     */
    public void shutdown() {
        if (executorService != null) {
            executorService.shutdown();
        }
    }
}
