package com.sui.bigdata.rtcadmin.util;

import com.feidee.fdcommon.configuration.CustomConfiguration;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.util.*;

/**
 * @Author yanjiantong
 * @Date 2019/12/30 16:42
 * @Description
 * @email jiantong_yan@sui.com
 */
@Component
public class KafkaTopicUtil {
    private static final Logger logger = LoggerFactory.getLogger(KafkaTopicUtil.class);

    /** Maximum number of poll attempts before giving up on a topic. */
    private static final int MAX_POLL_ATTEMPTS = 5;
    /** Timeout per poll attempt, in milliseconds. */
    private static final long POLL_TIMEOUT_MS = 1000L;
    /** Consumer group id used by this admin utility. */
    private static final String GROUP_ID = "rtc-admin";

    /**
     * Consumes a single record from the given topic.
     * <p>
     * Polls up to {@link #MAX_POLL_ATTEMPTS} times ({@link #POLL_TIMEOUT_MS} ms each)
     * and returns the value of the first record seen. The consumer does not commit
     * offsets (auto-commit is disabled), so this is a non-destructive peek.
     *
     * @param topic topic name to consume from
     * @return the value of the first record received, or {@code null} if nothing was
     *         received within the attempt budget or an error occurred
     */
    public static String consumerRecord(String topic) {
        Consumer<String, String> consumer =
                getConsumer(CustomConfiguration.getString("kafka.bootstrap.servers"), GROUP_ID, false);
        if (consumer == null) {
            return null;
        }
        try {
            consumer.subscribe(Collections.singletonList(topic));
            for (int attempt = 0; attempt < MAX_POLL_ATTEMPTS; attempt++) {
                ConsumerRecords<String, String> records = consumer.poll(POLL_TIMEOUT_MS);
                for (ConsumerRecord<String, String> record : records) {
                    return record.value();
                }
            }
        } catch (Exception e) {
            // Pass the throwable itself so the stack trace is logged, not just the message.
            logger.error("消费记录失败：", e);
        } finally {
            consumer.close();
        }
        return null;
    }

    /**
     * Lists all topics visible to the configured Kafka cluster.
     *
     * @return a list of all topic names; {@code null} if the lookup failed
     *         (callers rely on the null-on-error contract, so it is preserved)
     */
    public static List<String> queryAllTopic() {
        List<String> allTopicList = new ArrayList<>();
        Consumer<String, String> consumer =
                getConsumer(CustomConfiguration.getString("kafka.bootstrap.servers"), GROUP_ID, false);
        if (consumer == null) {
            return allTopicList;
        }
        try {
            Map<String, List<PartitionInfo>> topics = consumer.listTopics();
            if (null != topics) {
                allTopicList.addAll(topics.keySet());
            }
        } catch (Exception e) {
            // Pass the throwable itself so the stack trace is logged, not just the message.
            logger.error("查询所有topic失败：", e);
            return null;
        } finally {
            consumer.close();
        }
        return allTopicList;
    }

    /**
     * Builds a String/String {@link KafkaConsumer} from application configuration.
     * <p>
     * Deserializers are fixed to {@code StringDeserializer}; commit interval, session
     * timeout, offset reset policy and max poll records are read from
     * {@code CustomConfiguration} with sensible defaults.
     *
     * @param brokers    Kafka bootstrap servers, e.g. {@code host1:9092,host2:9092}
     * @param groupId    consumer group id
     * @param autoCommit whether offsets are committed automatically
     * @return a newly created consumer; the caller is responsible for closing it
     */
    public static KafkaConsumer<String, String> getConsumer(String brokers, String groupId, boolean autoCommit) {
        logger.info("get kafka consumer>>>");
        Properties props = new Properties();
        props.put("bootstrap.servers", brokers);
        props.put("group.id", groupId);
        props.put("enable.auto.commit", autoCommit);
        props.put("auto.commit.interval.ms", CustomConfiguration.getString("kafka.auto.commit.interval.ms", "1000"));
        props.put("session.timeout.ms", CustomConfiguration.getString("kafka.session.timeout.ms", "30000"));
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", CustomConfiguration.getString("kafka.auto.offset.reset", "earliest"));
        props.put("max.poll.records", CustomConfiguration.getInt("kafka.max.poll.records", 1));
        // Parameterized logging avoids string concatenation when INFO is disabled.
        logger.info("kafka consumer config:{}", props);
        return new KafkaConsumer<>(props);
    }
}
