package com.mutouren.common.mq.kafka;

/*
 * This class wraps the old (Scala-based) Kafka Consumer API.
 * Because its dependency pulls in a large number of jars, it has been
 * disabled and may be removed entirely.
 * 
 * To migrate to the new consumer API, delete
		<dependency>
		    <groupId>org.apache.kafka</groupId>
		    <artifactId>kafka_2.11</artifactId>
		    <version>0.10.1.0</version>
		</dependency>
			
    and replace it with
		<dependency>
		    <groupId>org.apache.kafka</groupId>
		    <artifactId>kafka-clients</artifactId>
		    <version>0.10.1.0</version>
		</dependency>
 * 
 * */
/*
import java.io.Closeable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.kafka.common.serialization.Deserializer;

import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;
import kafka.serializer.Decoder;

public class Consumer implements Closeable{

	private ConsumerConnector consumer;
	private Properties props;
	private ExecutorService executor = Executors.newCachedThreadPool();
	
	public Consumer(String zookeeperConnect, String groupId) {
		this(zookeeperConnect, groupId, false);
	}	
	
	public Consumer(String zookeeperConnect, String groupId, boolean isSmallestOffset) {
		this(getProperties(zookeeperConnect, groupId, isSmallestOffset));
	}
	
	public Consumer(Properties props) {
		this.props = props;
		this.consumer = kafka.consumer.Consumer.createJavaConsumerConnector(new ConsumerConfig(this.props));
	}
	
	private static Properties getProperties(String zookeeperConnect, String groupId, boolean isSmallestOffset) {
        Properties props = new Properties();
        props.put("zookeeper.connect", zookeeperConnect);
        props.put("group.id", groupId);
        
        if (isSmallestOffset) {
        	props.put("auto.offset.reset", "smallest");
        } else {
        	props.put("auto.offset.reset", "largest");
        }
                        
//		props.put("socket.timeout.ms", "30000");        
//		props.put("zookeeper.session.timeout.ms", "6000");
//		props.put("zookeeper.sync.time.ms", "2000");
//		props.put("auto.commit.enable", true);
//		props.put("auto.commit.interval.ms", "60000");        
        
//		props.put("rebalance.backoff.ms", "2000");
//		props.put("rebalance.max.retries", "4");        
        
		return props;
	}
	
	@SuppressWarnings({ "unchecked", "resource" })
	public static <T> Deserializer<T> getDeserializer(Class<T> clazz) {
		Deserializer<T> result;
		
		if (clazz== Integer.class) {
			result = (Deserializer<T>) new org.apache.kafka.common.serialization.IntegerDeserializer();
		} else if (clazz == Long.class) {
			result = (Deserializer<T>) new org.apache.kafka.common.serialization.LongDeserializer();
		} else if (clazz == byte[].class) {	
			result = (Deserializer<T>) new org.apache.kafka.common.serialization.ByteArrayDeserializer();
		} else if (clazz == String.class) {
			result = (Deserializer<T>) new org.apache.kafka.common.serialization.StringDeserializer();
		} else {
			throw new IllegalArgumentException( "consumer deserializer don't know the type: " + clazz);
		}		
		return result;
	}	
	
	public <K, V> void addListener (final String topic, int threadNum, ConsumerCallback<K, V> callback, 
			Class<K> keyType, Class<V> valueType) {
        Map<String, Integer> topicCountMap = new HashMap<String, Integer>();
        topicCountMap.put(topic, new Integer(threadNum));
        
        Decoder<K> keyDecoder = new Decoder<K> () {
        	final Deserializer<K> deserializer = getDeserializer(keyType);
			@Override
			public K fromBytes(byte[] value) {
				return deserializer.deserialize(topic, value);
			}        	
        };
        Decoder<V> valueDecoder = new Decoder<V> () {
        	final Deserializer<V> deserializer = getDeserializer(valueType);
			@Override
			public V fromBytes(byte[] value) {
				return deserializer.deserialize(topic, value);
			}        	
        };  
        
        Map<String, List<KafkaStream<K, V>>> consumerMap = consumer.createMessageStreams(topicCountMap, keyDecoder, valueDecoder);
                
        List<KafkaStream<K, V>> listStream = consumerMap.get(topic);
        for(KafkaStream<K, V> stream : listStream) {
        	executor.submit(new ConsumerThread<K, V>((KafkaStream<K, V>)stream, callback));
        }
	}
	
	@Override
	public void close() {
		this.executor.shutdownNow();
		this.consumer.shutdown();
	}
	
	static interface ConsumerCallback<K, V> {
		void callback(MessageAndMetadata<K, V> messageAndMetadata);
	}
	
	static class ConsumerThread<K, V> implements Runnable {
		
		private KafkaStream<K, V> stream;
		private ConsumerCallback<K, V> callback;
		
		public ConsumerThread(KafkaStream<K, V> stream, ConsumerCallback<K, V> callback) {
			this.stream = stream;
			this.callback = callback;
		}

		@Override
		public void run() {
			System.out.println("ConsumerThread start: " + Thread.currentThread().getName());
			ConsumerIterator<K, V> it = stream.iterator();
			while (it.hasNext()) {
				MessageAndMetadata<K, V> info = it.next();
				//display(info);
				callback.callback(info);
			}			
		}
		
		public void display(MessageAndMetadata<K, V> info) {
			System.out.println(String.format("%s: partition[%d], offset[%d], key=%s, msg=%s", 
					Thread.currentThread().getName(),
					info.partition(),
					info.offset(),
					info.key(),
					info.message()
					));		
		}
		
	}	
}*/
