package com.ibm.cps.forwarder;

import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;

import com.ibm.cps.kafka.KafkaTopicConstructorForMultiTenants;
import com.ibm.util.LocalConfig;

import kafka.producer.KeyedMessage;

import org.apache.log4j.Logger;

import com.google.common.base.Throwables;
import com.ibm.cps.kafka.KafkaProducer;
import com.ibm.cps.message.MessageFactory;
import com.ibm.cps.message.RawDataMessage;
import com.ibm.cps.newmessage.AbstractDataSourceMetadata;
import com.ibm.interfaces.IMessageClient;
import com.ibm.util.ErrorCode;
import com.ibm.util.exception.CPSException;
/**
 * Created by gongxuan on 11/30/15.
 */

/**
 * Listener thread that pulls raw messages from a data source's consumer client,
 * wraps each one into the CPS binary data-message format, and forwards the batch
 * to the tenant's Kafka message topic.
 *
 * The loop terminates when {@link #removeDataSource()} is invoked or once
 * {@link #ERROR_TIMES_THRESHOLD} {@code CPSException}s have been caught
 * (cumulative over the thread's lifetime — the counter is never reset).
 */
public class MessageListenerV2 extends Thread {
    private final AbstractDataSourceMetadata dataSource;
    private final Logger logger;
    private final String tenantid;
    private final KafkaProducer producer;
    private final IMessageClient consumerClient;
    /** Stop the listener after this many cumulative caught exceptions. */
    private static final int ERROR_TIMES_THRESHOLD = 100;
    private int errorTimes = 0;
    /**
     * Cleared by {@link #removeDataSource()} to request a loop shutdown.
     * Named {@code running} to avoid confusion with {@link Thread#isAlive()}.
     */
    private final AtomicBoolean running = new AtomicBoolean(true);
    private final KafkaTopicConstructorForMultiTenants kafkaTopicConstructorForMultiTenants =
            new KafkaTopicConstructorForMultiTenants();

    /**
     * @param kafkaBroker broker list handed to the Kafka producer
     * @param dataSource  metadata supplying the tenant id, schema id, source name
     *                    and the consumer client to subscribe with
     * @throws CPSException if the consumer client cannot subscribe to its target
     */
    public MessageListenerV2(String kafkaBroker,
                             AbstractDataSourceMetadata dataSource) throws CPSException {
        this.dataSource = dataSource;
        this.logger = Logger.getLogger(this.getClass());
        this.tenantid = dataSource.getTenantid();
        producer = new KafkaProducer(kafkaBroker);
        consumerClient = dataSource.getDataSourceConsumerClient();
        consumerClient.subscribe(dataSource.getSubscribeTarget());
    }

    @Override
    public void run() {
        String kafkaMessageTopic = kafkaTopicConstructorForMultiTenants
                .getMessageTopic(tenantid);

        while (running.get() && errorTimes < ERROR_TIMES_THRESHOLD) {
            try {
                long start = System.currentTimeMillis();
                Collection<String> mqMessages = consumerClient.consumeMessage();
                long time = System.currentTimeMillis() - start;
                logger.info("=======TIME===ReceiveDataFromMQ:" + time + " ms ===================");

                if (mqMessages == null) {
                    // Nothing consumed this round; poll again.
                    logger.info("no message from " + consumerClient + " to " + kafkaMessageTopic);
                    continue;
                }

                start = System.currentTimeMillis();
                ArrayList<KeyedMessage<String, byte[]>> keyedmessages =
                        buildKeyedMessages(mqMessages, kafkaMessageTopic);

                try {
                    producer.send(keyedmessages);
                } catch (Exception e) {
                    logger.error(Throwables.getStackTraceAsString(e));
                    throw new CPSException(ErrorCode.INTERNAL_ERROR_CODE,
                            Throwables.getStackTraceAsString(e));
                }
                time = System.currentTimeMillis() - start;
                logger.info("=======TIME===SendDataToKafka:" + time + " ms ===================");

            } catch (CPSException e) {
                // Count the failure but keep polling until the threshold is hit.
                errorTimes++;
                logger.error(Throwables.getStackTraceAsString(e));
            }
        }

        if (errorTimes >= ERROR_TIMES_THRESHOLD) {
            logger.error("More than "
                    + ERROR_TIMES_THRESHOLD
                    + " exceptions are caught by MQListener, so we stop the listener.");
        }
        if (!running.get()) {
            logger.info("We stop the listener thread for data source "
                    + dataSource.getName() + ".");
        }
    }

    /**
     * Wraps each non-null raw MQ message into a partition-keyed Kafka message
     * destined for {@code kafkaMessageTopic}.
     *
     * @throws CPSException if message generation fails
     */
    private ArrayList<KeyedMessage<String, byte[]>> buildKeyedMessages(
            Collection<String> mqMessages, String kafkaMessageTopic) throws CPSException {
        ArrayList<KeyedMessage<String, byte[]>> keyedmessages =
                new ArrayList<KeyedMessage<String, byte[]>>(mqMessages.size());
        int count = 0;
        for (String mqMessage : mqMessages) {
            count++; // counts every element, including nulls, to match the consumed total
            if (mqMessage == null) {
                continue;
            }
            String schemaid = dataSource.getSchemaid();
            String sourceid = dataSource.getName();
            byte[] msgBytes = MessageFactory.generateBinaryDataMessage(
                    tenantid, schemaid, sourceid, mqMessage);
            RawDataMessage message = MessageFactory
                    .produceDataMessage(msgBytes);
            if (message == null) {
                continue;
            }
            logger.info("TsKey:" + message.getTsKey());
            // floorMod: String.hashCode() may be negative; plain % would yield
            // a negative partition index.
            logger.info("parti:" + Math.floorMod(message.getTsKey().hashCode(), 2));
            logger.info("---------------------------");

            KeyedMessage<String, byte[]> keyedmessage = new KeyedMessage<String, byte[]>(
                    kafkaMessageTopic, kafkaMessageTopic,
                    Math.floorMod(message.getTsKey().hashCode(), LocalConfig.KAFKA_PARTITION) + "",
                    msgBytes);
            keyedmessages.add(keyedmessage);
        }
        logger.info("========Count:" + count + "========");
        return keyedmessages;
    }

    /**
     * Closes the consumer client and requests the listener loop to stop.
     */
    public void removeDataSource() {
        consumerClient.close();
        running.set(false);
    }
}
