package com.vion.controller;

import com.vion.constant.Constants;
import com.vion.model.TopicPartitionVo;
import com.vion.util.JsonMessage;
import com.vion.util.JsonMessageUtil;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@RestController
public class KafkaOffsetController {

    /** Supplies the Kafka client configuration used to build a one-shot metadata consumer. */
    @Autowired
    private ConsumerFactory<?, ?> consumerFactory;

    /**
     * @apiDefine kafka kafka
     */

    /**
     * @api {GET} offset 获取offset
     * @apiDescription 获取kafka消费者消费情况及消息总量
     * @apiGroup kafka
     * @apiVersion 1.0.0
     *
     * @apiSuccess {Number} code 返回编码
     * @apiSuccess {String} message 返回描述
     * @apiSuccess {Object[]} data 数据
     * @apiSuccess {String} data.topic topic
     * @apiSuccess {Number} data.partition 分区
     * @apiSuccess {Number} data.current 数据当前消费到的偏移量
     * @apiSuccess {Number} data.end 数据总量
     *
     */
    @GetMapping("/offset")
    public JsonMessage getOffset() {
        List<String> topics = Arrays.asList(
                Constants.KAFKA_VEHICLE,
                Constants.KAFKA_ILLEGAL,
                Constants.KAFKA_BEHAVIOR,
                Constants.KAFKA_TFLOW);
        List<TopicPartitionVo> list = new ArrayList<>();
        // KafkaConsumer holds broker connections; try-with-resources ensures it is
        // closed after each request instead of leaking one consumer per call.
        try (KafkaConsumer<String, Object> kafkaConsumer =
                     new KafkaConsumer<>(consumerFactory.getConfigurationProperties())) {
            for (String topic : topics) {
                List<PartitionInfo> partitionInfos = kafkaConsumer.partitionsFor(topic);
                if (partitionInfos == null) {
                    // Topic does not exist (and auto-creation is disabled) — skip it.
                    continue;
                }
                List<TopicPartition> partitionList = partitionInfos.stream()
                        .map(info -> new TopicPartition(info.topic(), info.partition()))
                        .collect(Collectors.toList());
                // End offset per partition == total message count high-water mark.
                Map<TopicPartition, Long> endOffsets = kafkaConsumer.endOffsets(partitionList);
                // Last committed consumer-group offsets; value may be null when the
                // group has never committed for a partition.
                Map<TopicPartition, OffsetAndMetadata> committed =
                        kafkaConsumer.committed(endOffsets.keySet());

                // Single pass: every partition with a known end offset is reported,
                // with current = committed offset (0 if nothing committed yet).
                for (Map.Entry<TopicPartition, Long> entry : endOffsets.entrySet()) {
                    TopicPartition topicPartition = entry.getKey();
                    OffsetAndMetadata metadata = committed.get(topicPartition);
                    TopicPartitionVo partition = new TopicPartitionVo();
                    partition.setTopic(topicPartition.topic());
                    partition.setPartition(topicPartition.partition());
                    partition.setCurrent(metadata == null ? 0 : metadata.offset());
                    partition.setEnd(entry.getValue());
                    list.add(partition);
                }
            }
        }
        return JsonMessageUtil.getSuccessJsonMsg(list);
    }

}
