package com.margo.project.consumer.kafka.client.controller;

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.DescribeTopicsResult;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartitionInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
@RequestMapping("/kafka/admin")
public class MargoKafkaAdminController {

	private static final Logger logger = LoggerFactory.getLogger(MargoKafkaAdminController.class);

	/** Topic this demo controller operates on; was previously duplicated in info() and add(). */
	private static final String TOPIC_NAME = "margo-kafka-topic";

	@Autowired
	private AdminClient adminClient;

	/**
	 * Lists the names of all topics visible to this admin client.
	 *
	 * @return the set of topic names
	 * @throws InterruptedException if the calling thread is interrupted while waiting
	 * @throws ExecutionException if the broker request fails
	 */
	@RequestMapping("/list")
	public Set<String> topicList() throws InterruptedException, ExecutionException {
		ListTopicsResult listTopicsResult = adminClient.listTopics();
		return listTopicsResult.names().get();
	}

	/**
	 * Describes {@value #TOPIC_NAME}, logging each partition's leader, replicas
	 * and in-sync replicas, and returns the description as a string.
	 *
	 * @return string form of the topic description
	 * @throws InterruptedException if the calling thread is interrupted while waiting
	 * @throws ExecutionException if the describe request fails
	 */
	@RequestMapping("/info")
	public String info() throws InterruptedException, ExecutionException {
		List<String> topics = new LinkedList<String>();
		topics.add(TOPIC_NAME);
		DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(topics);
		Map<String, KafkaFuture<TopicDescription>> map = describeTopicsResult.values();
		map.forEach((k, v) -> {
			logger.info("k:{}", k);
			try {
				TopicDescription topicDescription = v.get();
				logger.info("主题名称：{}", topicDescription.name());

				List<TopicPartitionInfo> partitions = topicDescription.partitions();
				partitions.forEach(topicPartitionInfo -> {
					logger.info("*********************************************");
					logger.info("分区id：{}", topicPartitionInfo.partition());

					Node leader = topicPartitionInfo.leader();
					logger.info("----------------------------------------");
					logger.info("首领分区");
					logger.info("首领分区id：{}", leader.id());
					logger.info("首领分区host：{}", leader.host());
					logger.info("首领分区port：{}", leader.port());

					logger.info("分区所有副本");
					topicPartitionInfo.replicas().forEach(node -> {
						logger.info("----------------------------------------");
						logger.info("副本分区id：{}", node.id());
						logger.info("副本分区host：{}", node.host());
						logger.info("副本分区port：{}", node.port());
					});

					logger.info("分区所有isr副本");
					// BUGFIX: previously called replicas() again; isr() is the in-sync replica set.
					topicPartitionInfo.isr().forEach(node -> {
						logger.info("----------------------------------------");
						logger.info("isr副本分区id：{}", node.id());
						logger.info("isr副本分区host：{}", node.host());
						logger.info("isr副本分区port：{}", node.port());
					});
				});
			} catch (InterruptedException e) {
				// Restore the interrupt flag so callers up the stack can observe it.
				Thread.currentThread().interrupt();
				logger.error("Interrupted while describing topic {}", k, e);
			} catch (ExecutionException e) {
				logger.error("Failed to describe topic {}", k, e);
			}
		});

		return map.get(TOPIC_NAME).get().toString();
	}

	/**
	 * Creates {@value #TOPIC_NAME} with 2 partitions and replication factor 1.
	 *
	 * @return 1 on success
	 * @throws InterruptedException if the calling thread is interrupted while waiting
	 * @throws ExecutionException if topic creation fails (e.g. topic already exists)
	 */
	@RequestMapping("/add")
	public int add() throws InterruptedException, ExecutionException {
		NewTopic topic = new NewTopic(TOPIC_NAME, 2, (short) 1);
		List<NewTopic> topicList = new ArrayList<NewTopic>();
		topicList.add(topic);
		CreateTopicsResult createTopicsResult = adminClient.createTopics(topicList);
		// BUGFIX: the future was previously discarded, so creation failures were
		// silently swallowed and 1 was returned regardless. Await it so errors surface.
		createTopicsResult.all().get();
		return 1;
	}
}
