package com.kafka.consumer;

import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.StringDecoder;
import kafka.utils.VerifiableProperties;
import net.sf.json.JSONObject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.kafka.core.BaseClass;
import com.kafka.enumtype.MsgType;
import com.kafka.util.HttpUtil;

/**
 * Kafka consumer for user-lifecycle events. Reads JSON messages from the topic
 * configured in {@code user.properties}, dispatches each to the HTTP endpoint
 * matching its {@code msgtype} field, and records per-message success/failure
 * via {@link BaseClass#addMessage}.
 */
public class UserConsumer extends BaseClass implements Runnable {
	public static Logger log = LoggerFactory.getLogger(UserConsumer.class);
	private final ConsumerConnector consumer;

	// Per-instance configuration loaded from user.properties.
	// (Previously mutable static fields: a second instance would have
	// silently overwritten the first one's settings.)
	private final String topic;
	private final String registerUri;
	private final String updateUri;
	private final String userBlackListUri;
	private final String userActivateUri;
	private final String userCloseUri;

	/**
	 * Loads {@code user.properties} from the classpath and creates the
	 * underlying Kafka consumer connector from those properties.
	 */
	public UserConsumer() {
		Properties props = new Properties();
		// try-with-resources: the original leaked the classpath InputStream.
		try (InputStream in = ClassLoader.getSystemResourceAsStream("user.properties")) {
			if (in == null) {
				throw new IOException("user.properties not found on classpath");
			}
			props.load(in);
		} catch (IOException e) {
			// Log (not printStackTrace); the getProperty calls below then
			// yield nulls, matching the original failure mode.
			log.error("Failed to load user.properties", e);
		}
		topic = props.getProperty("topic");
		registerUri = props.getProperty("register_uri");
		updateUri = props.getProperty("update_uri");
		// NOTE(review): the next three endpoints all read the same
		// "sets_uri" key — confirm this is intentional.
		userBlackListUri = props.getProperty("sets_uri");
		userActivateUri = props.getProperty("sets_uri");
		userCloseUri = props.getProperty("sets_uri");
		ConsumerConfig config = new ConsumerConfig(props);
		consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
	}

	/**
	 * Consumes messages from the configured topic indefinitely. Each non-empty
	 * message is parsed as JSON, dispatched by its {@code msgtype}, and
	 * recorded as success; empty messages and any per-message exception are
	 * recorded as failures without stopping the loop.
	 *
	 * @throws IOException declared for API compatibility with callers
	 */
	public void consume() throws IOException {
		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
		// Integer.valueOf instead of the deprecated new Integer(1).
		topicCountMap.put(topic, Integer.valueOf(1));

		StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
		StringDecoder valueDecoder = new StringDecoder(new VerifiableProperties());

		Map<String, List<KafkaStream<String, String>>> consumerMap = consumer
				.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
		// Single stream per topic (count 1 above).
		KafkaStream<String, String> stream = consumerMap.get(topic).get(0);
		ConsumerIterator<String, String> it = stream.iterator();
		// hasNext() blocks until a message arrives; this loop runs forever.
		while (it.hasNext()) {
			MessageAndMetadata<String, String> mam = it.next();
			try {
				if (!mam.message().isEmpty()) {
					JSONObject json = rebuildJSON(JSONObject.fromObject(mam.message()));
					dispatch(json);
					addMessage(mam, success, "Send Message successful！");
				} else {
					addMessage(mam, fail, "message is empty!");
				}
			} catch (Exception e) {
				// Record the failure against this message; keep consuming.
				addMessage(mam, fail, e.toString());
			}
		}
	}

	/**
	 * Routes one parsed message to the HTTP endpoint matching its
	 * {@code msgtype} field.
	 *
	 * @param json message payload; must contain msgtype/datas/appid/secretkey
	 * @throws Exception when msgtype matches no known {@link MsgType}
	 */
	private void dispatch(JSONObject json) throws Exception {
		// Read each field once instead of re-parsing per branch.
		String msgType = json.getString("msgtype");
		String datas = json.getString("datas");
		String appId = json.getString("appid");
		String secretKey = json.getString("secretkey");
		if (MsgType.REGISTER.toString().equals(msgType)) {
			// Register user
			log.info("#######HttpUtil_print|REGISTER:{}", datas);
			HttpUtil.doPost(registerUri, datas, appId, secretKey);
		} else if (MsgType.UDATE.toString().equals(msgType)) {
			// Update user
			HttpUtil.doPost(updateUri, datas, appId, secretKey);
			log.info("#######HttpUtil_print|UPDATE:{}", datas);
		} else if (MsgType.ACTIVATEUSER.toString().equals(msgType)) {
			// Activate user (log label was mislabeled "UPDATE" — copy-paste bug)
			HttpUtil.doPost(userActivateUri, datas, appId, secretKey);
			log.info("#######HttpUtil_print|ACTIVATE:{}", datas);
		} else if (MsgType.USERBLACKLIST.toString().equals(msgType)) {
			// Blacklist user (log label was mislabeled "UPDATE")
			HttpUtil.doPost(userBlackListUri, datas, appId, secretKey);
			log.info("#######HttpUtil_print|BLACKLIST:{}", datas);
		} else if (MsgType.CLOSEUSER.toString().equals(msgType)) {
			// Close (deregister) user (log label was mislabeled "UPDATE")
			HttpUtil.doPost(userCloseUri, datas, appId, secretKey);
			log.info("#######HttpUtil_print|CLOSE:{}", datas);
		} else {
			// Raw Exception kept so the recorded e.toString() text is unchanged.
			throw new Exception("No match msgType!");
		}
	}

	@Override
	public void run() {
		try {
			consume();
		} catch (IOException e) {
			// UnsupportedEncodingException is an IOException subtype, so the
			// original separate catch was redundant. Log instead of
			// printStackTrace so the failure reaches the configured appender.
			log.error("Consumer loop terminated", e);
		}
	}
}

//public class RegisterConsumer {
//
//	private final ConsumerConnector consumer;
//	private ExecutorService executor;
//	
//	private static String TOPIC;
//	private static String ENCODE;
//	private static String postUri;
//
//	public RegisterConsumer() {
//		Properties props = new Properties();
//		try {
//			props.load(ClassLoader
//					.getSystemResourceAsStream("register.properties"));
//			TOPIC = props.getProperty("topic");
//			ENCODE = props.getProperty("encode");
//			postUri = props.getProperty("post_uri");
//		} catch (IOException e) {
//			e.printStackTrace();
//		}
//		ConsumerConfig config = new ConsumerConfig(props);
//		consumer = kafka.consumer.Consumer.createJavaConsumerConnector(config);
//	}
//	
//    public void shutdown() {
//        if (consumer != null)
//            consumer.shutdown();
//        if (executor != null)
//            executor.shutdown();
//    }
//
//	public void consume(int numThreads) throws IOException {
//		Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
//		topicCountMap.put(TOPIC, new Integer(numThreads));
//
//		StringDecoder keyDecoder = new StringDecoder(new VerifiableProperties());
//		StringDecoder valueDecoder = new StringDecoder(
//				new VerifiableProperties());
//
//		Map<String, List<KafkaStream<String, String>>> consumerMap = consumer
//				.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
//		List<KafkaStream<String, String>> streams = consumerMap.get(TOPIC);
//		
//        // now launch all the threads
//        executor = Executors.newFixedThreadPool(numThreads);
// 
//        // now create an object to consume the messages
//        //
//        int threadNumber = 0;
//        for (final KafkaStream stream : streams) {
//            executor.submit(new ConsumerMsgTask(stream, threadNumber,postUri,ENCODE));
//            threadNumber++;
//        }
//	}
//}