package com.linkstec.kafka;

import java.util.Collection;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.DescribeClusterResult;
import org.apache.kafka.clients.admin.DescribeTopicsResult;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.admin.TopicDescription;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.Node;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

/**
 * Demo of the Kafka {@code AdminClient} API: lists all topics, lists the
 * cluster nodes, and describes two specific topics, logging each result.
 *
 * <p>Connects to the hard-coded broker list in {@link #bootstrapServers};
 * intended as a manual smoke test, not production code.
 */
public class kafkaAdminClientTest {
	private static final Logger logger = LogManager.getLogger();

	private static final String bootstrapServers = "192.168.2.232:9092,192.168.2.233:9092,192.168.2.234:9092";

	public static void main(String[] args) {
		Properties properties = new Properties();
		properties.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);

		// try-with-resources: AdminClient owns network threads and sockets.
		// The original never closed it, leaking the client and preventing a
		// clean JVM shutdown. Program to the AdminClient interface — the
		// downcast to KafkaAdminClient was unnecessary.
		try (AdminClient adminClient = AdminClient.create(properties)) {
			listAllTopics(adminClient);
			listClusterNodes(adminClient);
			describeTopics(adminClient);
		}
	}

	/** Lists every topic name in the cluster and logs the count. */
	private static void listAllTopics(AdminClient adminClient) {
		ListTopicsResult listTopicsResult = adminClient.listTopics();
		try {
			// Keep the use of the result inside the try: the original logged
			// size() after the catch blocks and would NPE if get() had failed.
			Set<String> topics = listTopicsResult.names().get();
			logger.info("topic数量：" + topics.size());
			for (String topic : topics) {
				logger.info(topic);
			}
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore interrupt status
			logger.error("Interrupted while listing topics", e);
		} catch (ExecutionException e) {
			logger.error("Failed to list topics", e);
		}
	}

	/** Lists all broker nodes in the cluster. */
	private static void listClusterNodes(AdminClient adminClient) {
		DescribeClusterResult describeClusterResult = adminClient.describeCluster();
		try {
			Collection<Node> nodes = describeClusterResult.nodes().get();
			for (Node node : nodes) {
				logger.info(node);
			}
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore interrupt status
			logger.error("Interrupted while describing cluster", e);
		} catch (ExecutionException e) {
			logger.error("Failed to describe cluster", e);
		}
	}

	/** Describes two hard-coded topics and logs each description. */
	private static void describeTopics(AdminClient adminClient) {
		Set<String> topicNames = new HashSet<String>();
		topicNames.add("test");
		topicNames.add("BankingSecuritiesTransfer");
		DescribeTopicsResult describeTopicsResult = adminClient.describeTopics(topicNames);

		// One future per topic; handle each independently so one failing
		// topic does not prevent logging the others.
		for (KafkaFuture<TopicDescription> topicDescriptionFuture : describeTopicsResult.values().values()) {
			try {
				TopicDescription topicDescription = topicDescriptionFuture.get();
				logger.info(topicDescription);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt(); // restore interrupt status
				logger.error("Interrupted while describing topics", e);
			} catch (ExecutionException e) {
				logger.error("Failed to describe topic", e);
			}
		}
	}
}