package hn.cch.kafka;

import hn.cch.KafkaApp;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * A {@link Runnable} that subscribes a {@link KafkaConsumer} to a single topic and
 * logs every record it receives as {@code partition:offset:key:value}.
 *
 * <p>The consumer polls in a loop with a timeout of {@code KafkaApp.timeout} ms and
 * exits as soon as a poll returns no records, so this runnable drains the currently
 * available messages rather than consuming forever.
 *
 * <p>Not thread-safe: {@code server}/{@code topic} must be configured before the
 * task is started and not mutated while it runs.
 */
public class ConsumerRunnable implements Runnable {

    private static final Logger logger = LoggerFactory.getLogger(ConsumerRunnable.class);

    /** Kafka bootstrap server(s), e.g. {@code "host:9092"}. */
    private String server;
    /** Topic to subscribe to. */
    private String topic;

    public ConsumerRunnable() {
    }

    /**
     * @param server Kafka bootstrap server(s) for {@code bootstrap.servers}
     * @param topic  topic to subscribe to
     */
    public ConsumerRunnable(String server, String topic) {
        this.server = server;
        this.topic = topic;
    }

    public String getServer() {
        return server;
    }

    public void setServer(String server) {
        this.server = server;
    }

    public String getTopic() {
        return topic;
    }

    public void setTopic(String topic) {
        this.topic = topic;
    }

    /**
     * Creates a consumer, subscribes to {@link #topic}, and logs each received record.
     * Returns when a poll comes back empty (no more messages within the timeout).
     */
    @Override
    public void run() {
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, server);
        // Key/value deserializers
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "ConsumerGroup");

        // Auto-commit offsets every second
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        // Heartbeat interval
        properties.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, "3000");
        // Maximum idle time for connections
        properties.put(ConsumerConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, "540000");
        // Start from the earliest offset when no committed offset exists
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties)) {
            // Subscribe once, before the poll loop — re-subscribing on every
            // iteration is unnecessary per the KafkaConsumer API.
            kafkaConsumer.subscribe(Collections.singletonList(topic));
            while (true) {
                ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(
                        Duration.ofMillis(KafkaApp.timeout));
                if (consumerRecords.isEmpty()) {
                    // No records within the timeout — treat the topic as drained and stop.
                    break;
                }
                for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                    int partition = consumerRecord.partition();
                    long offset = consumerRecord.offset();
                    String key = consumerRecord.key();
                    String value = consumerRecord.value();
                    logger.info("{}:{}:{}:{}", partition, offset, key, value);
                }
            }
        } catch (Exception e) {
            // Log at ERROR with the full stack trace instead of swallowing the
            // cause via e.getMessage() at INFO level.
            logger.error("Kafka consumer for topic {} failed", topic, e);
        }
    }

}
