package com.ibm.cps.spark.streaming.adapter;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import kafka.consumer.KafkaStream;
import kafka.message.MessageAndMetadata;
import backtype.storm.tuple.Values;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.ibm.cps.kafka.KafkaConsumer;
import com.ibm.cps.kafka.KafkaTopicConstructor;
import com.ibm.cps.kafka.KafkaTopicConstructorForMultiTenants;
import com.ibm.cps.message.AbstractMessage;
import com.ibm.cps.message.MessageFactory;
import com.ibm.cps.message.RawDataMessage;
import com.ibm.cps.newmessage.AbstractMetadata;
import com.ibm.cps.newmessage.MetadataFactory;
import com.ibm.cps.newmessage.SerializableDataSourceMetadata;
import com.ibm.cps.spark.streaming.EmbeddedTopologyCreator;
import com.ibm.util.JSONUtility;
import com.ibm.util.LocalConfig;
import com.ibm.util.TopologyStreamIds;
import com.ibm.util.exception.CPSException;

/**
 * Created by telekinesis on 4/28/15.
 */
/**
 * Manual test harness: builds the embedded Spark/Storm topology for one test
 * tenant, subscribes to that tenant's Kafka topics, and forwards every
 * consumed message to the matching bolt stream of the embedded topology.
 * Runs until the process is killed (consumer threads block on their streams).
 */
public class TestEmbeddedTopology {
	private static final KafkaTopicConstructor topicCreator = new KafkaTopicConstructorForMultiTenants();

	// Tenant whose topics this harness subscribes to.
	private static final String tenantid = "569f2dcf728d4169561c9956";
	// NOTE(review): dataTopic and dataMessageTopic are both derived from
	// getMessageTopic(tenantid) and therefore name the same topic; the
	// subscription map below collapses them into a single entry.
	private static final String dataTopic = topicCreator
			.getMessageTopic(tenantid);
	private static final String processorTopic = topicCreator
			.getMetadataAddTopic(tenantid);
	private static final String processorDeleteTopic = topicCreator
			.getMetadataDeleteTopic(tenantid);
	private static final String dataMessageTopic = topicCreator
			.getMessageTopic(tenantid);
	private static final String dataSourceTopic = topicCreator
			.getDataSourceAddTopic(tenantid);
	private static final String dataSourceDeleteTopic = topicCreator
			.getDataSourceDeleteTopic(tenantid);

	/**
	 * Builds the embedded topology and wires it to the Kafka consumer streams.
	 *
	 * @throws CPSException if topology creation fails
	 */
	public static void main(String[] args) throws CPSException {
		SparkEmbeddedCollector collector = buildEmbeddedStormTopology();
		connectCollectorWithKafka(collector);
	}

	/** Creates the embedded topology for the test tenant with a mock task id. */
	private static SparkEmbeddedCollector buildEmbeddedStormTopology()
			throws CPSException {
		final int MOCKUP_TASK_ID = 0;
		return EmbeddedTopologyCreator.create(tenantid, MOCKUP_TASK_ID);
	}

	/**
	 * Subscribes to the tenant's topics and starts one consumer thread per
	 * Kafka stream. The executor is deliberately never shut down: this
	 * harness is expected to keep consuming until the process is killed.
	 *
	 * @param collector embedded topology entry point receiving the messages
	 */
	private static void connectCollectorWithKafka(
			final SparkEmbeddedCollector collector) {
		KafkaConsumer consumer = new KafkaConsumer(
				LocalConfig.ZOOKEEPER_HOST_PORT);

		Map<String, Integer> topics = new HashMap<>();
		topics.put(dataTopic, 1);
		topics.put(dataMessageTopic, 1); // same name as dataTopic; no extra entry
		topics.put(processorTopic, 1);
		topics.put(processorDeleteTopic, 1);
		topics.put(dataSourceTopic, 1);
		topics.put(dataSourceDeleteTopic, 1);

		Map<String, List<KafkaStream<byte[], byte[]>>> consumerStreams = consumer
				.consume(topics);

		System.out.println("connected collector with kafka");

		ExecutorService executor = Executors.newFixedThreadPool(6);
		for (final List<KafkaStream<byte[], byte[]>> streams : consumerStreams
				.values()) {
			for (final KafkaStream<byte[], byte[]> stream : streams) {
				executor.submit(new Runnable() {
					@Override
					public void run() {
						System.out.println("consumer thread started");
						// Typed iteration: no raw MessageAndMetadata, no cast
						// on message().
						for (MessageAndMetadata<byte[], byte[]> msg : stream) {
							byte[] message = msg.message();
							String topic = msg.topic();
							try {
								dispatch(collector, topic, message);
							} catch (Exception ex) {
								// Best-effort harness: log the bad payload and
								// keep consuming.
								System.out.println("Failed to parse "
										+ new String(message,
												StandardCharsets.UTF_8));
								ex.printStackTrace();
							}
						}
						System.out.println("consumer thread ended");
					}
				});
			}
		}
	}

	/**
	 * Routes one raw Kafka payload to the bolt stream matching its topic.
	 * Any topic not matched explicitly is treated as a datasource delete,
	 * mirroring the original else-branch behavior.
	 *
	 * FIX: the metadata branches previously called toString() on the byte[]
	 * payload, which yields "[B@&lt;hash&gt;" instead of the JSON content; the
	 * bytes are now decoded as UTF-8 text.
	 *
	 * @param collector embedded topology entry point
	 * @param topic     Kafka topic the payload arrived on
	 * @param message   raw message bytes
	 * @throws Exception on any parse/forward failure (caller logs and continues)
	 */
	private static void dispatch(SparkEmbeddedCollector collector,
			String topic, byte[] message) throws Exception {
		if (topic.equals(dataMessageTopic)) {
			System.out.println("received spark data");
			RawDataMessage parsedData = MessageFactory
					.produceDataMessage(message);
			System.out.println("parsedData: " + parsedData.toString());
			collector.sendDataToBolt(TopologyStreamIds.RAW_DATA_STREAM,
					new Values(parsedData, parsedData.getTsKey()));
			return;
		}

		// All remaining topics carry JSON text payloads.
		String payload = new String(message, StandardCharsets.UTF_8);
		if (topic.equals(processorTopic)) {
			System.out.println("add processor");
			AbstractMetadata meta = MetadataFactory.parseJsonMetadata(payload);
			collector.sendDataToBolt(TopologyStreamIds.PROCESSOR_SPOUT_STREAM,
					new Values(meta));
		} else if (topic.equals(processorDeleteTopic)) {
			System.out.println("delete processor");
			AbstractMetadata meta = MetadataFactory.parseJsonMetadata(payload);
			collector.sendDataToBolt(
					TopologyStreamIds.PROCESSOR_DELETE_SPOUT_STREAM,
					new Values(meta));
		} else if (topic.equals(dataSourceTopic)) {
			System.out.println("add datasource");
			SerializableDataSourceMetadata source = new SerializableDataSourceMetadata(
					payload);
			collector.sendDataToBolt(
					TopologyStreamIds.DATASOURCE_SPOUT_STREAM, new Values(
							source));
		} else {
			System.out.println("delete datasource");
			SerializableDataSourceMetadata source = new SerializableDataSourceMetadata(
					payload);
			collector.sendDataToBolt(
					TopologyStreamIds.DATASOURCE_DELETE_SPOUT_STREAM,
					new Values(source));
		}
	}
}
