package com.ibm.cps.forwarder;

import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;

import com.ibm.cps.kafka.KafkaTopicConstructorForMultiTenants;
import kafka.producer.KeyedMessage;

import org.apache.log4j.Logger;

import com.google.common.base.Throwables;
import com.ibm.cps.kafka.KafkaProducer;
import com.ibm.cps.kafka.KafkaTopicConstructor;
import com.ibm.cps.message.MessageFactory;
import com.ibm.cps.message.RawDataMessage;
import com.ibm.cps.newmessage.AbstractDataSourceMetadata;
import com.ibm.interfaces.IMessageClient;
import com.ibm.util.ErrorCode;
import com.ibm.util.exception.CPSException;

/**
 * Listener thread that consumes raw messages from a data source's message
 * client and forwards them, wrapped as binary data messages, to the tenant's
 * Kafka message topic.
 *
 * <p>The loop runs until {@link #removeDataSource()} is called or the
 * cumulative number of caught {@link CPSException}s reaches
 * {@link #ERROR_TIMES_THRESHOLD}.
 */
public class MessageListener extends Thread {

	/** Cumulative exception budget; once reached the listener stops itself. */
	private static final int ERROR_TIMES_THRESHOLD = 100;

	private final KafkaTopicConstructorForMultiTenants kafkaTopicConstructorForMultiTenants =
			new KafkaTopicConstructorForMultiTenants();

	private final AbstractDataSourceMetadata dataSource;
	private final Logger logger;
	private final String tenantid;
	private final KafkaProducer producer;
	private final IMessageClient consumerClient;

	// Counts every CPSException caught in the loop; never reset on success,
	// so the threshold bounds total (not consecutive) failures.
	private int errorTimes = 0;

	// Cleared by removeDataSource() to request a graceful stop.
	private final AtomicBoolean isAlive = new AtomicBoolean(true);

	/**
	 * Creates a listener bound to one data source and one Kafka broker.
	 *
	 * @param kafkaBroker broker address handed to the Kafka producer
	 * @param dataSource  metadata describing the source; supplies the tenant id,
	 *                    schema id, consumer client and subscription target
	 * @throws CPSException if subscribing to the data source fails
	 */
	public MessageListener(String kafkaBroker,
			AbstractDataSourceMetadata dataSource) throws CPSException {
		this.dataSource = dataSource;
		this.logger = Logger.getLogger(this.getClass());
		this.tenantid = dataSource.getTenantid();
		this.producer = new KafkaProducer(kafkaBroker);
		this.consumerClient = dataSource.getDataSourceConsumerClient();
		this.consumerClient.subscribe(dataSource.getSubscribeTarget());
	}

	/**
	 * Main forwarding loop: repeatedly consumes a batch and forwards it,
	 * counting failures, until stopped or the error budget is exhausted.
	 */
	@Override
	public void run() {
		String kafkaMessageTopic = kafkaTopicConstructorForMultiTenants
				.getMessageTopic(tenantid);

		logger.info("Start forwarding for tenant " + tenantid);
		while (isAlive.get() && errorTimes < ERROR_TIMES_THRESHOLD) {
			try {
				forwardOneBatch(kafkaMessageTopic);
			} catch (CPSException e) {
				// Single point of logging for any failure in the iteration.
				errorTimes++;
				logger.error(Throwables.getStackTraceAsString(e));
			}
		}

		if (errorTimes >= ERROR_TIMES_THRESHOLD) {
			logger.error("More than "
					+ ERROR_TIMES_THRESHOLD
					+ " exceptions are caught by MessageListener, so we stop the listener.");
		}
		if (!isAlive.get()) {
			logger.info("We stop the listener thread for data source "
					+ dataSource.getName() + ".");
		}
		// TODO(review): producer is never closed on exit — confirm whether
		// KafkaProducer exposes a close()/shutdown method and call it here.
	}

	/**
	 * Consumes one batch from the source client, converts each non-null entry
	 * into a keyed binary message, and sends the batch to Kafka.
	 *
	 * @param kafkaMessageTopic destination topic for this tenant
	 * @throws CPSException if consuming or sending fails
	 */
	private void forwardOneBatch(String kafkaMessageTopic) throws CPSException {
		long consumeStart = System.currentTimeMillis();
		Collection<String> mqMessages = consumerClient.consumeMessage();
		logger.info("==========Time1:"
				+ (System.currentTimeMillis() - consumeStart) + "===================");

		if (mqMessages == null) {
			// Empty poll; nothing to forward this round.
			logger.info("no message from " + consumerClient + " to " + kafkaMessageTopic);
			return;
		}

		long sendStart = System.currentTimeMillis();
		int count = 0;
		ArrayList<KeyedMessage<String, byte[]>> keyedMessages =
				new ArrayList<KeyedMessage<String, byte[]>>();
		for (String mqMessage : mqMessages) {
			logger.info("Forward message " + mqMessage
					+ " from datasource " + dataSource.getName());
			// NOTE: count includes null entries, matching the reported batch size.
			count++;
			if (mqMessage == null) {
				continue;
			}
			byte[] msgBytes = MessageFactory.generateBinaryDataMessage(
					tenantid, dataSource.getSchemaid(), dataSource.getName(), mqMessage);
			RawDataMessage message = MessageFactory.produceDataMessage(msgBytes);
			if (message == null) {
				// Unparseable payload; skip it rather than abort the batch.
				continue;
			}
			keyedMessages.add(new KeyedMessage<String, byte[]>(
					kafkaMessageTopic, message.getTsKey() + "", msgBytes));
		}
		logger.info("========Count:" + count + "========");

		try {
			producer.send(keyedMessages);
		} catch (Exception e) {
			// Wrap and rethrow; run() logs and counts the failure exactly once
			// (the original logged the same stack trace twice).
			throw new CPSException(ErrorCode.INTERNAL_ERROR_CODE,
					Throwables.getStackTraceAsString(e));
		}
		logger.info("==========Time2:"
				+ (System.currentTimeMillis() - sendStart) + "===================");
	}

	/**
	 * Requests a graceful stop and releases the consumer connection.
	 *
	 * <p>The stop flag is cleared BEFORE closing the client so that the run()
	 * loop observes the shutdown request and exits cleanly, instead of a
	 * consume call on the already-closed client throwing and being counted
	 * as a forwarding error.
	 */
	public void removeDataSource() {
		isAlive.set(false);
		consumerClient.close();
	}

}
