package com.ibm.cps.model;

import com.google.common.base.Throwables;
import com.ibm.factories.PublishProcessorFactory;
import com.ibm.interfaces.AbstractMessagePublisher;
import com.ibm.interfaces.IPersistency;
import com.ibm.mongo.MongoPersistency;
import com.ibm.util.ErrorCode;
import com.ibm.util.exception.CPSException;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.log4j.Logger;

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * Singleton that caches publisher metadata persisted in MongoDB and hands out
 * one {@link PublishProcessorFactory} per tenant id.
 *
 * <p>Thread-safety: the singleton is created via the lazy-holder idiom, the
 * tenant map is a {@link ConcurrentMap}, and the one-time Mongo load is
 * serialized by a write lock with a re-check of the {@code isFirst} flag.
 *
 * Created by firephoenix on 16-2-29.
 */
public class PublisherMonitor {

        /** Lazy-initialization holder: the JVM guarantees the instance is built
         *  exactly once, on first use, without explicit synchronization. */
        private static final class Holder {
            private static final PublisherMonitor INSTANCE = new PublisherMonitor();
        }

        // Per-tenant factories. Concurrent map so getFactory()/addPublisher()
        // cannot race; putIfAbsent keeps creation atomic per tenant.
        private final ConcurrentMap<String, PublishProcessorFactory> tenantid2PFactory =
                new ConcurrentHashMap<String, PublishProcessorFactory>();

        // True until the publishers stored in Mongo have been cached once.
        // volatile so the fast-path check in cachePublisher() is safe.
        private volatile boolean isFirst = true;

        private static final Logger logger = Logger.getLogger(PublisherMonitor.class);

        // Serializes the one-time cache load in cachePublisher().
        private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();

        /**
         * Returns the process-wide instance. Unlike the previous unsynchronized
         * lazy null-check, the holder idiom cannot create two instances under
         * concurrent first calls.
         */
        public static PublisherMonitor getInstance() {
            return Holder.INSTANCE;
        }

        private PublisherMonitor() {
        }

        /**
         * Returns the factory for the given tenant, creating an empty one on
         * first request. Triggers the one-time load of persisted publishers.
         *
         * @param tenantid tenant identifier used as the factory key
         * @return the tenant's factory, never {@code null}
         * @throws CPSException declared for caller compatibility; this method
         *         itself no longer throws it
         */
        public PublishProcessorFactory getFactory(String tenantid) throws CPSException {
            cachePublisher();
            return factoryFor(tenantid);
        }

        /**
         * Loads all publisher metadata from Mongo into the per-tenant
         * factories, exactly once.
         *
         * <p>Fixes in this version: the early return on a {@code null}
         * publisher collection no longer unbalances the read lock (the old
         * lock-downgrade dance threw {@link IllegalMonitorStateException} on
         * that path); {@code isFirst} is re-checked after the write lock is
         * acquired so concurrent callers cannot load twice; and one corrupt
         * record is logged and skipped instead of aborting the whole load,
         * which previously left {@code isFirst == true} forever and re-added
         * the already-loaded publishers on every retry.
         */
        private void cachePublisher() {
            if (!isFirst) {
                return; // cheap volatile fast path: already cached
            }
            lock.writeLock().lock();
            try {
                if (!isFirst) {
                    return; // another thread finished the load while we waited
                }
                IPersistency persistency = MongoPersistency.getInstace();
                Collection<String> publisher = persistency.getPublisher(null, null);
                if (publisher == null) {
                    return; // nothing persisted yet; retry on the next call
                }
                for (String source : publisher) {
                    try {
                        AbstractMessagePublisher metadata = PublishProcessorFactory
                                .createOutputProcessor(source);
                        addPublisher(metadata.getTenantid(), metadata);
                    } catch (CPSException e) {
                        // One corrupt record must not abort the whole load.
                        logger.error("Error Publisher metadata stored in mongo:"
                                + source);
                        logger.error(Throwables.getStackTraceAsString(e));
                    }
                }
                isFirst = false;
            } catch (CPSException e) {
                logger.error("Failed to get Publisher metadata stored in mongo.");
                logger.error(Throwables.getStackTraceAsString(e));
            } finally {
                lock.writeLock().unlock();
            }
        }

        /**
         * Registers one publisher under its tenant's factory. Failures are
         * logged and swallowed on purpose: a single bad publisher must not
         * stop the cache load.
         */
        private void addPublisher(String tenantid,
                                  AbstractMessagePublisher metadata) {
            try {
                factoryFor(tenantid).addPublishProcessor(metadata.toString());
            } catch (CPSException e) {
                logger.error("Failed to add Publisher metadata.");
                logger.error(Throwables.getStackTraceAsString(e));
            }
        }

        /** Returns the tenant's factory, atomically creating it if absent. */
        private PublishProcessorFactory factoryFor(String tenantid) {
            PublishProcessorFactory factory = tenantid2PFactory.get(tenantid);
            if (factory == null) {
                PublishProcessorFactory created = new PublishProcessorFactory();
                PublishProcessorFactory existing =
                        tenantid2PFactory.putIfAbsent(tenantid, created);
                factory = (existing != null) ? existing : created;
            }
            return factory;
        }

        /**
         * Deletes the named publisher from the tenant's factory.
         *
         * @throws CPSException if the publisher is unknown for this tenant, or
         *         if the underlying delete fails — the previous version
         *         swallowed that failure and then logged success anyway
         */
        public void deletePublisher(String tenantid,
                                    AbstractMessagePublisher publisher) throws CPSException {
            cachePublisher();
            PublishProcessorFactory factory = tenantid2PFactory.get(tenantid);
            if (factory == null
                    || !factory.containsPublisher(publisher.getName())) {
                throw new CPSException(ErrorCode.INVALID_REQUEST_CODE,
                        "Data source " + publisher.getName()
                                + " was not existing.");
            }
            factory.deletePublishProcessor(publisher.getName(), tenantid);
            // Only reached when the delete actually succeeded.
            logger.info("Successfully deleted data source " + publisher.getProcessorMetadta());
        }

        /** Ad-hoc smoke check for hex decoding; not used by the application. */
        public static void main(String[] args) throws DecoderException {
            Hex.decodeHex("1234567890123456789012".toCharArray());
        }
}
