package com.ibm.cps.forwarder;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Throwables;
import com.ibm.cps.kafka.KafkaTopicConstructorForMultiTenants;
import com.ibm.cps.kafka.listener.ForwarderAddDataSourceListener;
import com.ibm.cps.kafka.listener.ForwarderDeleteDataSourceListenerV2;
import com.ibm.cps.message.MessageFields;
import com.ibm.cps.newmessage.AbstractDataSourceMetadata;
import com.ibm.cps.newmessage.DataSourceMetadataFactory;
import com.ibm.interfaces.IPersistency;
import com.ibm.mongo.MongoPersistency;
import com.ibm.util.ErrorCode;
import com.ibm.util.JSONUtility;
import com.ibm.util.LocalConfig;
import com.ibm.util.SimpleQueue;
import com.ibm.util.exception.CPSException;
import org.apache.log4j.*;

import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Created by gongxuan on 12/3/15.
 */
public class MessageForwarderV2 {

    /** Tenant this forwarder serves; used to build per-tenant Kafka topics and consumer group ids. */
    private final String tenantSerialId;
    /** Persistence layer used to load the tenant's previously registered data sources. */
    private final IPersistency persistency;
    /** Queue of data sources waiting for a listener; pre-filled from persistence, fed by the add-listener. */
    private final SimpleQueue addedDataSources;
    // Initialized once at class-load time; the original reassigned this static field
    // from every constructor call and left it null before the first construction.
    private static final Logger logger = Logger.getLogger(MessageForwarderV2.class);
    /** Maps data-source name -> its running listener; shared with the delete-listener thread. */
    private final Map<String, MessageListenerV2> ds2Listener = new ConcurrentHashMap<String, MessageListenerV2>();
    private final KafkaTopicConstructorForMultiTenants kafkaTopicConstructorForMultiTenants =
            new KafkaTopicConstructorForMultiTenants();

    /** Shared pool for all listener tasks; cached so idle threads are reclaimed. */
    private static final ExecutorService executorService =
            Executors.newCachedThreadPool(new ThreadHandlerFactory());

    /**
     * Creates a forwarder for one tenant and pre-loads every data source already
     * persisted for that tenant into the work queue.
     *
     * @param tenantSerialId serial id of the tenant whose topics are forwarded
     * @throws Exception if the persistence lookup or metadata parsing fails
     */
    public MessageForwarderV2(String tenantSerialId) throws Exception {
        PropertyConfigurator.configure(LocalConfig.getLoggerProperties());
        this.tenantSerialId = tenantSerialId;
        addedDataSources = new SimpleQueue();
        persistency = MongoPersistency.getInstace();

        Collection<String> existedDataSources = persistency.getDataSource(tenantSerialId, null);
        if (existedDataSources != null) {
            for (String datasource : existedDataSources) {
                ObjectNode node = JSONUtility.fromObjectString(datasource);
                String type = JSONUtility.GetString(MessageFields.TYPE, node);
                AbstractDataSourceMetadata ds = DataSourceMetadataFactory.parseDataSourceMetadata(node, type);
                if (ds != null) {
                    addedDataSources.put(ds);
                    // Reuse the already-parsed node instead of parsing the JSON a second time.
                    logger.debug("Load existed data source: " + node);
                }
            }
        }
    }

    /**
     * Redirects this forwarder's log output to a per-tenant file and raises the
     * log level to DEBUG.
     *
     * @throws CPSException if the file appender cannot be created
     */
    public void initLog() throws CPSException {
        SimpleLayout layout = new SimpleLayout();
        String logPath = tenantSerialId + "forwarder.log";
        try {
            FileAppender appender = new FileAppender(layout, logPath, false);
            logger.addAppender(appender);
        } catch (IOException e) {
            // The CPSException constructor used here takes no cause, so carry the
            // original failure reason in the message instead of dropping it.
            throw new CPSException(ErrorCode.INVALID_INPUT_ERROR,
                    "can't create file appender " + logPath + ": " + e.getMessage());
        }
        logger.setLevel(Level.DEBUG); // redundant (Level) cast removed
        logger.info("Forwarder log is written to file " + logPath);
    }

    /**
     * Blocks forever, attaching a listener to every data source that arrives on
     * the queue. Returns only by throwing when the waiting thread is interrupted.
     *
     * @throws CPSException if the wait on the queue is interrupted
     */
    public void forward() throws CPSException {
        try {
            while (true) {
                AbstractDataSourceMetadata dataSourceInfo = (AbstractDataSourceMetadata) addedDataSources.take();
                addDataSource(dataSourceInfo);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers up the stack can observe it.
            Thread.currentThread().interrupt();
            throw new CPSException(ErrorCode.INTERNAL_ERROR_CODE, e.getMessage());
        }
    }

    /**
     * Registers a data source and starts a dedicated listener for it.
     * Idempotent: a data source whose name is already registered is skipped.
     *
     * @param dataSource metadata of the data source to listen to; {@code null} is a no-op
     * @return always {@code true} (kept for caller compatibility)
     * @throws CPSException if the listener cannot be created
     */
    public boolean addDataSource(AbstractDataSourceMetadata dataSource) throws CPSException {
        if (dataSource == null) {
            return true;
        }

        MessageListenerV2 dsListener = new MessageListenerV2(LocalConfig.KAFKA_BROKERS, dataSource);
        // putIfAbsent closes the check-then-act race of containsKey()+put(): if two
        // threads add the same source concurrently, only one listener is started.
        if (ds2Listener.putIfAbsent(dataSource.getName(), dsListener) != null) {
            logger.warn("Data source " + dataSource.getName() + " has already been added.");
            return true;
        }
        executorService.submit(dsListener);
        logger.debug("Add data source " + dataSource.getName() + " successfully.");
        return true;
    }

    /**
     * Starts the two control-channel listeners for this tenant: one that feeds
     * newly added data sources into the work queue, and one that removes
     * listeners for deleted data sources via {@code ds2Listener}.
     *
     * @throws CPSException if a listener cannot be created
     */
    public void createListeners() throws CPSException {

        String metadataTopic = kafkaTopicConstructorForMultiTenants.getDataSourceAddTopic(tenantSerialId);
        String groupid = tenantSerialId + "_addGroup";
        executorService.execute(new ForwarderAddDataSourceListener(LocalConfig.ZOOKEEPER_HOST_PORT, groupid,
                metadataTopic, tenantSerialId, addedDataSources));
        try {
            // Give the add-listener a head start before wiring the delete-listener.
            // NOTE(review): a fixed sleep is fragile; prefer an explicit readiness signal.
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve interrupt status for callers
            logger.error(Throwables.getStackTraceAsString(e));
        }
        metadataTopic = kafkaTopicConstructorForMultiTenants.getDataSourceDeleteTopic(tenantSerialId);
        groupid = tenantSerialId + "_deleteDataSource";
        executorService.execute(new ForwarderDeleteDataSourceListenerV2(LocalConfig.ZOOKEEPER_HOST_PORT, groupid,
                metadataTopic, tenantSerialId, ds2Listener));
    }

    /**
     * Entry point: {@code java MessageForwarderV2 <tenantSerialId>}.
     * Builds a forwarder for the given tenant, starts the control listeners,
     * then forwards until interrupted.
     */
    public static void main(String[] args) throws Exception {
        if (args == null || args.length != 1) {
            System.err.println("Usage: Program <tenantSerialId>");
            return;
        }
        String tenantid = args[0];
        MessageForwarderV2 forwarder = new MessageForwarderV2(tenantid);
        forwarder.createListeners();
        forwarder.forward();
    }

}
