package com.wang.helloworld;

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Scanner;
import java.util.concurrent.ExecutorService;

import kafka.message.MessageAndMetadata;
import kafka.serializer.StringEncoder;
//import kafka.utils.VerifiableProperties;

import com.wang.helloworld.client.KafkaConsumer;
import com.wang.helloworld.client.KafkaProducer;
import com.wang.helloworld.client.KeyDividedPartitioner;
import com.wang.helloworld.event.EventManager.MessageEvent;
import com.wang.helloworld.event.EventManager.MessageEventListener;

public class HelloKafka implements MessageEventListener{
	private static final String topic = "test";
	public static final int partitions = 3;
	
	protected static final String broker_list = "dev.findsing.com:9092";
	protected static final Class<StringEncoder> serializer_encode_class = StringEncoder.class;
	protected static final Class<KeyDividedPartitioner> serializer_partitioner_class = KeyDividedPartitioner.class;

	protected static final String zookeeper_connection = "dev.findsing.com:2181";

	protected static final String group_id = "group_1";

	// create 3 threads to consume from each of the partitions
	public static List<ExecutorService> executorManager = new ArrayList<ExecutorService>();
	
	public static void main(String[] args) {
		final HelloKafka kafkaClient = new HelloKafka();
		KafkaProducer producer = kafkaClient.publish();
		new Thread(){
			@Override
			public void run() {
				KafkaConsumer consumer = kafkaClient.subscriber(topic);
			}
		}.start();

		Scanner console = new Scanner(System.in);
		do{
			System.out.print("> ");
			String command = console.nextLine();
			if(command.equals("exit"))
			{
				for(ExecutorService executor: executorManager)
				{
					executor.shutdownNow();
				}
				return;
			}

			producer.publish(topic, command);
		}while(true);
	}

	protected KafkaProducer publish() {
		Properties producerProps = new Properties();
		producerProps.put("serializer.class", serializer_encode_class.getName());
		producerProps.put("partitioner.class", serializer_partitioner_class.getName());
		producerProps.put("zookeeper.connect", zookeeper_connection);
		producerProps.put("metadata.broker.list", broker_list);
		producerProps.put("request.required.acks", "1");

		KafkaProducer kafkaProducer = new KafkaProducer(producerProps);
		
		return kafkaProducer;
	}

	protected KafkaConsumer subscriber(String topic) {
		Properties consumerProps = new Properties();
		consumerProps.put("zookeeper.connect", zookeeper_connection);
		consumerProps.put("group.id", group_id);
		consumerProps.put("zk.sessiontimeout.ms", "400");
		consumerProps.put("zk.synctime.ms", "200");
		consumerProps.put("consumer.timeout.ms", "5000");
		consumerProps.put("autocommit.interval.ms", "1000");

		KafkaConsumer consumer = new KafkaConsumer(consumerProps);
		consumer.addMessageListener(this);
		consumer.subscribe(topic);
		
		return consumer;
	}

	public void receiveEvent(MessageEvent event) {
		MessageAndMetadata<byte[], byte[]> messageAndMetadata = event.getEvent();
		
		try {
			String message = new String(messageAndMetadata.message(), "UTF-8");
			if(null != messageAndMetadata.key())
			{
				System.out.println("[Partition-" + messageAndMetadata.partition() + "]" 
						+ "[Key-"+ new String(messageAndMetadata.key(),"UTF-8") + "]" +message);
			}
			else
			{
				System.out.println("[Partition-" + messageAndMetadata.partition() + "]" 
						+ "[Key-null]" + message);
			}
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		}
	}
}
