package com.rrd.cannl.Utils;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.kafka.common.security.JaasUtils;

import com.alibaba.fastjson.JSONObject;
import com.rrd.cannl.sink.KafkaSink;

import kafka.admin.AdminUtils;
import kafka.admin.RackAwareMode;
import kafka.admin.TopicCommand;
import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.server.ConfigType;
import kafka.utils.ZkUtils;

/**
 * Utility methods for administering and consuming from Kafka via the legacy
 * Scala admin API ({@link AdminUtils}/{@link ZkUtils}) and the old high-level
 * consumer ({@link ConsumerConnector}).
 *
 * <p>NOTE(review): ZooKeeper addresses and the consumer group id are hard-coded
 * in {@link #listTopic()} and {@link #consumeMessage(String)}; createTopic and
 * queryTopic read {@code zookeeper.connect} from {@code conf.properties} instead.
 */
public class KafkaUtil {

	/** ZkUtils session/connection timeout in milliseconds. */
	private static final int ZK_TIMEOUT_MS = 30000;

	/**
	 * Loads {@code conf.properties} from the classpath.
	 *
	 * @return the loaded properties
	 * @throws IOException if the resource is missing or cannot be read
	 */
	private static Properties loadConf() throws IOException {
		Properties conf = new Properties();
		// try-with-resources closes the stream; the original leaked it and
		// would have thrown an opaque NPE if the resource was missing.
		try (InputStream in = KafkaSink.class.getClassLoader().getResourceAsStream("conf.properties")) {
			if (in == null) {
				throw new IOException("conf.properties not found on classpath");
			}
			conf.load(in);
		}
		return conf;
	}

	/**
	 * Creates a single-partition, single-replica topic.
	 *
	 * @param topicName name of the topic to create
	 * @throws IOException if the configuration cannot be loaded
	 */
	private static void createTopic(String topicName) throws IOException {
		Properties conf = loadConf();
		ZkUtils zkUtils = ZkUtils.apply(conf.getProperty("zookeeper.connect"),
				ZK_TIMEOUT_MS, ZK_TIMEOUT_MS, JaasUtils.isZkSecurityEnabled());
		try {
			// 1 partition, replication factor 1, no topic-level overrides.
			AdminUtils.createTopic(zkUtils, topicName, 1, 1, new Properties(), RackAwareMode.Enforced$.MODULE$);
			System.out.println("创建成功!");
		} finally {
			// Always release the ZooKeeper connection, even if createTopic throws.
			zkUtils.close();
		}
	}

	/**
	 * Prints every topic-level configuration override of the given topic.
	 *
	 * @param topicName topic whose configuration is queried
	 * @throws IOException if the configuration cannot be loaded
	 */
	private static void queryTopic(String topicName) throws IOException {
		Properties conf = loadConf();
		ZkUtils zkUtils = ZkUtils.apply(conf.getProperty("zookeeper.connect"),
				ZK_TIMEOUT_MS, ZK_TIMEOUT_MS, JaasUtils.isZkSecurityEnabled());
		try {
			Properties props = AdminUtils.fetchEntityConfig(zkUtils, ConfigType.Topic(), topicName);
			// Properties.entrySet() is typed Set<Map.Entry<Object,Object>>; the
			// original used a raw Iterator with unchecked casts.
			for (Map.Entry<Object, Object> entry : props.entrySet()) {
				System.out.println(entry.getKey() + " = " + entry.getValue());
			}
		} finally {
			zkUtils.close();
		}
	}

	/** Prints all topics known to the (hard-coded) ZooKeeper ensemble. */
	public static void listTopic() {
		String[] options = new String[] {
				"--list",
				"--zookeeper",
				"172.16.2.90:2181,172.16.2.91:2181,172.16.2.148:2181"
		};
		TopicCommand.main(options);
	}

	/**
	 * Consumes the given topic from the earliest offset and prints each message
	 * that parses as JSON. Blocks indefinitely; messages that are not valid JSON
	 * are skipped silently (deliberate best-effort behaviour).
	 *
	 * @param topicName topic to consume
	 * @throws IOException declared for interface compatibility with callers
	 */
	public static void consumeMessage(String topicName) throws IOException {
		Properties props = new Properties();
		props.put("auto.offset.reset", "smallest");
		props.put("zookeeper.connect", "172.16.2.90:2181,172.16.2.91:2181,172.16.2.148:2181");
		props.put("group.id", "333fcdcd");
		ConsumerConnector consumer = Consumer.createJavaConsumerConnector(new ConsumerConfig(props));
		try {
			Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
			// Autoboxing instead of the deprecated new Integer(1) constructor.
			topicCountMap.put(topicName, 1);

			Map<String, List<KafkaStream<byte[], byte[]>>> consumerMap = consumer.createMessageStreams(topicCountMap);
			KafkaStream<byte[], byte[]> stream = consumerMap.get(topicName).get(0);
			ConsumerIterator<byte[], byte[]> it = stream.iterator();

			while (it.hasNext()) {
				System.out.println("#############");
				// Decode with an explicit charset; the original relied on the
				// platform default, which varies between hosts.
				String userActionData = new String(it.next().message(), StandardCharsets.UTF_8);
				try {
					JSONObject json = JSONObject.parseObject(userActionData);
					System.out.println(json);
				} catch (Exception ignored) {
					// Non-JSON payloads are skipped on purpose.
				}
			}
		} finally {
			// Release consumer threads/ZK connection if the loop ever exits
			// (e.g. via an unexpected runtime exception).
			consumer.shutdown();
		}
	}

	public static void main(String[] args) throws IOException {
//		listTopic();
//		consumeMessage("cannlTest");
		queryTopic("kafka.topic.data.record.test");
	}
}
