package pumpIn.client;

import com.fasterxml.jackson.databind.deser.std.StringDeserializer;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.common.TopicPartition;
import pumpIn.client.type.ClientType;

import java.time.Duration;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

@Data
@Slf4j
public class KafkaClientDeserted extends Client {

    /**
     * Default Kafka string deserializer, by fully-qualified name. The Jackson
     * StringDeserializer imported at the top of this file is a JSON
     * deserializer and is NOT a Kafka {@code Deserializer}; using it as
     * {@code key.deserializer}/{@code value.deserializer} fails at runtime.
     */
    private static final String DEFAULT_DESERIALIZER =
            "org.apache.kafka.common.serialization.StringDeserializer";

    private String bootstrapServers, keyDeserializer, valueDeserializer, groupId, topic;

    private String keyFilePath;

    // Snapshot of the effective configuration, exposed via getMetricInfo().
    private Map<String, Object> infoMap;
    private KafkaConsumer<String, String> consumer;

    // Set to true by close(); pumpIn() polls until then.
    private boolean isClosed;

    @Override
    public boolean start() {
        // Nothing to start eagerly; the consumer is created in initClient().
        return false;
    }

    /**
     * Polls the subscribed topic until {@link #close()} is invoked, collecting
     * every record value and committing offsets asynchronously after each
     * non-empty batch.
     *
     * @return the values of all records consumed before the client was closed
     */
    @Override
    public List<String> pumpIn() {
        List<String> recordList = new ArrayList<>();
        // BUG FIX: the original looped while (isClosed); initClient() sets
        // isClosed = false, so the loop body never executed. Poll until closed.
        while (!isClosed) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(10));
            if (records.isEmpty()) {
                continue;
            }
            for (TopicPartition partition : records.partitions()) {
                List<ConsumerRecord<String, String>> partitionRecords = records.records(partition);
                if (partitionRecords == null || partitionRecords.isEmpty()) {
                    continue;
                }
                for (ConsumerRecord<String, String> record : partitionRecords) {
                    recordList.add(record.value());
                }
            }

            // enable.auto.commit is forced to "false" in initClient(), so each
            // batch must be committed manually.
            consumer.commitAsync(new OffsetCommitCallback() {
                @Override
                public void onComplete(Map<TopicPartition, OffsetAndMetadata> map, Exception e) {
                    if (e == null) {
                        log.debug("Success to commit offset [{}]", map);
                    } else {
                        log.error("Failed to commit offset [{}]", e.getMessage(), e);
                    }
                }
            });
        }
        return recordList;
    }

    /**
     * Reads connection settings from {@code config}, creates the consumer and
     * subscribes to the configured topic. A null or empty config is a no-op.
     *
     * @param config consumer properties; expected to carry "bootstrap.servers"
     *               and "topic" (and optionally "group_id",
     *               "key_deserializer", "value_deserializer")
     * @return this client, for chaining
     */
    @Override
    public KafkaClientDeserted initClient(Properties config) {
        // BUG FIX: the original used the non-short-circuiting '&', which
        // evaluates config.isEmpty() (NPE) even when config is null.
        if (null != config && !config.isEmpty()) {
            this.type = ClientType.KAFKA;
            this.bootstrapServers = config.getProperty("bootstrap.servers");
            this.groupId = config.getProperty("group_id", "igroup");
            // BUG FIX: the original assigned the "topic" property to groupId,
            // clobbering the group id and leaving this.topic null — which
            // NPE'd in infoMap.put and subscribed to a null topic below.
            this.topic = config.getProperty("topic");

            // BUG FIX: the original defaulted to Jackson's StringDeserializer
            // (a JSON deserializer Kafka cannot instantiate as a Deserializer).
            this.keyDeserializer = config.getProperty("key_deserializer", DEFAULT_DESERIALIZER);
            this.valueDeserializer = config.getProperty("value_deserializer", DEFAULT_DESERIALIZER);
            // Propagate to the property keys the Kafka client actually reads,
            // without overriding values the caller already supplied directly.
            config.putIfAbsent("key.deserializer", this.keyDeserializer);
            config.putIfAbsent("value.deserializer", this.valueDeserializer);

            // Offsets are committed manually in pumpIn().
            config.setProperty("enable.auto.commit", "false");

            // ConcurrentHashMap rejects null values, so absent settings are
            // skipped instead of throwing.
            this.infoMap = new ConcurrentHashMap<>();
            putInfo("type", this.type);
            putInfo("bootstrap.servers", this.bootstrapServers);
            putInfo("groupId", this.groupId);
            putInfo("topic", this.topic);
            putInfo("key.deserializer", this.keyDeserializer);
            putInfo("value.deserializer", this.valueDeserializer);
            putInfo("enable.auto.commit", "false");

            this.consumer = new KafkaConsumer<>(config);
            this.consumer.subscribe(Collections.singletonList(topic));
            this.isClosed = false;
        }

        return this;
    }

    /** Adds a metric entry only when the value is present (map is null-hostile). */
    private void putInfo(String key, Object value) {
        if (value != null) {
            this.infoMap.put(key, value);
        }
    }

    /** @return the effective-configuration snapshot built in initClient() */
    @Override
    public Map<String, Object> getMetricInfo() {

        return this.infoMap;
    }

    /**
     * Marks the client closed (stopping the pumpIn() loop) and closes the
     * underlying consumer.
     * NOTE(review): KafkaConsumer is not thread-safe; if pumpIn() runs on a
     * different thread than close(), consumer.wakeup() should be used to
     * interrupt the poll instead — confirm the threading model with callers.
     *
     * @return true once the client is marked closed
     */
    @Override
    public boolean close() {
        this.isClosed = true;
        // Guard against close() being called before initClient().
        if (this.consumer != null) {
            this.consumer.close();
        }
        return this.isClosed();
    }
}
