package cn.getech.data.development.utils;

import cn.getech.data.development.model.dto.ConfConnectDto;
import cn.getech.data.intelligence.common.exception.RRException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.Arrays;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

/**
 * @company: getech[tcl]
 * @author: hujz
 * @Date: 2019/8/29 14:52  update 2019/9/9 14:25
 * @Description: 获取kafka连接以及数据
 */
public class MyKafkaUtils {

    /**
     * Consumers that must be released later, keyed by "groupUuid_confConnectId".
     * Only the map itself is thread-safe; a {@link KafkaConsumer} instance is
     * NOT thread-safe and must be used by one thread at a time.
     */
    private static final ConcurrentHashMap<String, KafkaConsumer<String, String>> kafkaConsumerMap = new ConcurrentHashMap<>();

    /** Utility class — not instantiable. */
    private MyKafkaUtils() {
    }

    /**
     * Creates a consumer positioned one record before the current log end of
     * partition 0 of the configured topic, registers it in
     * {@link #kafkaConsumerMap} under {@code groupUuid + "_" + confConnectDto.getId()},
     * and returns the generated group uuid.
     *
     * @param confConnectDto kafka connection info (broker url, topic, id)
     * @return the random uuid used in both the consumer group id and the map key
     */
    public static String initKafKaConsumer(ConfConnectDto confConnectDto) {
        Properties props = new Properties();
        String groupUuid = UUID.randomUUID().toString();
        props.put("bootstrap.servers", confConnectDto.getKafkaUrl());
        // Auto-commit is off so this preview consumer never commits offsets,
        // i.e. it does not actually "consume" the record for the group.
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "200");
        props.put("session.timeout.ms", "30000");
        props.put("group.id", "cn.getech.test." + groupUuid);
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        try {
            // Partition 0 is assumed for now (per the original note); other
            // partitions of the topic are not inspected.
            TopicPartition partition = new TopicPartition(confConnectDto.getTopic(), 0);
            consumer.assign(Arrays.asList(partition));
            // Jump to the log end, read the offset there, then step back one
            // record so the latest message is re-delivered by the next poll.
            consumer.seekToEnd(Arrays.asList(partition));
            long position = consumer.position(partition);
            consumer.seek(partition, position == 0 ? 0 : position - 1);
        } catch (RuntimeException e) {
            // Fix: do not leak the consumer when seeking fails
            // (e.g. unknown topic or unreachable broker).
            consumer.close();
            throw e;
        }
        String key = groupUuid + "_" + confConnectDto.getId();
        // groupUuid is random so a key collision is practically impossible,
        // but if a mapping already exists we must not leak the new consumer.
        if (kafkaConsumerMap.putIfAbsent(key, consumer) != null) {
            consumer.close();
        }
        return groupUuid;
    }


    /**
     * Creates a subscribing consumer for flink jobs: auto-commit enabled,
     * reading from the earliest offset when the group has none.
     *
     * @param groupId  consumer group id to join
     * @param topic    topic to subscribe to
     * @param kafkaUrl kafka bootstrap servers
     * @return a new consumer already subscribed to {@code topic}; the caller
     *         owns it and is responsible for closing it
     */
    public static KafkaConsumer<String, String> initFlinkKafKaConsumer(String groupId, String topic, String kafkaUrl) {
        Properties props = new Properties();
        props.put("bootstrap.servers", kafkaUrl);
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "200");
        props.put("session.timeout.ms", "30000");
        props.put("auto.offset.reset", "earliest");
        props.put("group.id", groupId);
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(topic));
        return consumer;
    }

    /**
     * Looks up a registered consumer by its map key ("groupUuid_confConnectId").
     *
     * @param key map key produced by {@link #initKafKaConsumer}
     * @return the registered consumer, or {@code null} if none is registered
     */
    public static KafkaConsumer<String, String> getKafkaConsumerByConnectId(String key) {
        return kafkaConsumerMap.get(key);
    }

    /**
     * Removes and closes the consumer registered under {@code key}; a no-op
     * when no consumer is registered.
     *
     * @param key map key produced by {@link #initKafKaConsumer}
     */
    public static void closeKafkaConsumer(String key) {
        // Fix: remove atomically so two concurrent callers cannot both obtain
        // the same consumer and close it twice.
        KafkaConsumer<String, String> kafkaConsumer = kafkaConsumerMap.remove(key);
        if (null != kafkaConsumer) {
            kafkaConsumer.close();
        }
    }


    /**
     * Polls the registered consumer until one record arrives or the timeout
     * elapses, returning that record's value (or {@code ""} on timeout).
     * The consumer is always removed from the map and closed afterwards.
     *
     * @param timeoutMs maximum time to wait, in milliseconds
     * @param key       map key produced by {@link #initKafKaConsumer}
     * @return the first record value seen, or the empty string on timeout
     * @throws RRException if no consumer is registered for {@code key} or
     *                     polling fails
     */
    public static String getKafkaLastData(long timeoutMs, String key) {
        KafkaConsumer<String, String> stringStringKafkaConsumer = getKafkaConsumerByConnectId(key);
        if (null == stringStringKafkaConsumer) {
            throw new RRException("获取连接失败!confConectId:" + key);
        }
        try {
            long deadline = System.currentTimeMillis() + timeoutMs;
            while (System.currentTimeMillis() < deadline) {
                ConsumerRecords<String, String> records = stringStringKafkaConsumer.poll(100);
                for (ConsumerRecord<String, String> record : records) {
                    return record.value();
                }
            }
            return "";
        } catch (Exception e) {
            // NOTE(review): RRException is built from the message only here;
            // if it has a (String, Throwable) constructor, prefer it so the
            // original stack trace is preserved — confirm against RRException.
            throw new RRException("消费kafka数据失败：" + e);
        } finally {
            // One-shot consumer: always deregister and release it.
            kafkaConsumerMap.remove(key);
            stringStringKafkaConsumer.close();
        }
    }

    /**
     * Returns the LIVE internal registry of consumers awaiting release.
     * Callers mutate the shared map directly — treat it as read-mostly.
     */
    public static ConcurrentHashMap<String, KafkaConsumer<String, String>> listKafkaConsumerMap() {
        return kafkaConsumerMap;
    }

}
