package com.ibm.cps.forwarder;

import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import com.ibm.cps.kafka.KafkaTopicConstructorForMultiTenants;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.apache.log4j.SimpleLayout;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Throwables;
import com.ibm.cps.kafka.KafkaTopicConstructor;
import com.ibm.cps.kafka.listener.ForwarderAddDataSourceListener;
import com.ibm.cps.kafka.listener.ForwarderDeleteDataSourceListener;
import com.ibm.cps.message.MessageFields;
import com.ibm.cps.newmessage.AbstractDataSourceMetadata;
import com.ibm.cps.newmessage.DataSourceMetadataFactory;
import com.ibm.cps.processors.ProcessorFactory;
import com.ibm.interfaces.IPersistency;
import com.ibm.mongo.MongoPersistency;
import com.ibm.util.ErrorCode;
import com.ibm.util.JSONUtility;
import com.ibm.util.LocalConfig;
import com.ibm.util.SimpleQueue;
import com.ibm.util.exception.CPSException;

/**
 * Forwards messages from the per-tenant message queue to Kafka topics.
 * 
 * @author Jingjing Wang
 * 
 *         Jan 26, 2015
 */
public class MessageForwarder {

	private String tenantSerialId;
	private IPersistency persistency;
	private SimpleQueue addedDataSources;
	private static Logger logger;
	private Map<String, MessageListener> ds2Listener = new ConcurrentHashMap<String, MessageListener>();
	private KafkaTopicConstructorForMultiTenants kafkaTopicConstructorForMultiTenants = new KafkaTopicConstructorForMultiTenants();

	private static ExecutorService executorService = Executors.newCachedThreadPool(new ThreadHandlerFactory());

	public MessageForwarder(String tenantSerialId) throws Exception {
		PropertyConfigurator.configure(LocalConfig.getLoggerProperties());
		logger = Logger.getLogger(MessageForwarder.class);
		this.tenantSerialId = tenantSerialId;
		addedDataSources = new SimpleQueue();
		persistency = MongoPersistency.getInstace();

		Collection<String> existedTopics = persistency.getDataSource(tenantSerialId, null);
		String type;
		ObjectNode node;
		if (existedTopics != null) {
			for (String topic : existedTopics) {
				node = JSONUtility.fromObjectString(topic);
				type = JSONUtility.GetString(MessageFields.TYPE, node);
				
				// filter the static data source
				if (DataSourceMetadataFactory.isStaticDataSource(type)) {
					continue;
				}
				AbstractDataSourceMetadata ds = DataSourceMetadataFactory.parseDataSourceMetadata(node, type);
				if (ds != null) {
					addedDataSources.put(ds);
					logger.debug("Load existed data source: " + JSONUtility.fromObjectString(topic));
				}
			}
		}
	}

	public void initLog() throws CPSException {
		SimpleLayout layout = new SimpleLayout();
		String logPath = tenantSerialId + "forwarder.log";
		try {
			FileAppender appender = new FileAppender(layout, logPath, false);
			logger.addAppender(appender);
		} catch (IOException e) {
			throw new CPSException(ErrorCode.INVALID_INPUT_ERROR, "can't create file appender " + logPath);
		}
		logger.setLevel((Level) Level.DEBUG);
		logger.info("Forwarder log is written to file " + logPath);
	}

	/**
	 * listen to the message queue always
	 *
	 * @throws CPSException
	 * @throws InterruptedException
	 *
	 * @throws Exception
	 */
	public void forward() throws CPSException {
		try {
			while (true) {
				AbstractDataSourceMetadata dataSourceInfo = (AbstractDataSourceMetadata) addedDataSources.take();
				addDataSource(dataSourceInfo);
			}
		} catch (InterruptedException e) {
			throw new CPSException(ErrorCode.INTERNAL_ERROR_CODE, e.getMessage());
		}
	}

	public boolean addDataSource(AbstractDataSourceMetadata dataSource) throws CPSException {
		if (dataSource == null) {
			return true;
		}

		if (ds2Listener.containsKey(dataSource.getName())) {
			logger.warn("Data source " + dataSource.getName() + " has already been added.");
			return true;
		}
		MessageListener dsListener = new MessageListener(LocalConfig.KAFKA_BROKERS, dataSource);
		ds2Listener.put(dataSource.getName(), dsListener);
		executorService.submit(dsListener);
		logger.debug("Add data source " + dataSource.getName() + " successfully.");
		return true;
	}

	/**
	 * create a new thread to listen to data source kafka topic
	 * 
	 * @throws CPSException
	 */
	public void createListeners() throws CPSException {

		String metadataTopic = kafkaTopicConstructorForMultiTenants.getDataSourceAddTopic(tenantSerialId);
		String groupid = tenantSerialId + "_addGroup";
		executorService.execute(new ForwarderAddDataSourceListener(LocalConfig.ZOOKEEPER_HOST_PORT, groupid,
				metadataTopic, tenantSerialId, addedDataSources));
		try {
			Thread.sleep(1000);
		} catch (InterruptedException e) {
			logger.error(Throwables.getStackTraceAsString(e));
		}
		metadataTopic = kafkaTopicConstructorForMultiTenants.getDataSourceDeleteTopic(tenantSerialId);
		groupid = tenantSerialId + "_deleteDataSource";
		executorService.execute(new ForwarderDeleteDataSourceListener(LocalConfig.ZOOKEEPER_HOST_PORT, groupid,
				metadataTopic, tenantSerialId, ds2Listener));
	}

	public static void main(String[] args) throws Exception {
		if (args == null || args.length != 1) {
			System.err.println("Usage: Program <tenantSerialId>");
			return;
		}
		String tenantid = args[0];
		MessageForwarder forwarder = new MessageForwarder(tenantid);
		forwarder.createListeners();
		forwarder.forward();
	}

}

