package com.ibm.cps.kafka.listener;

import java.nio.charset.StandardCharsets;
import java.util.HashMap;

import org.apache.log4j.Logger;

import com.google.common.base.Throwables;
import com.ibm.cps.kafka.KafkaTopicConsumeThread;
import com.ibm.cps.message.MessageFields;
import com.ibm.cps.newmessage.DataSourceMetadataFactory;
import com.ibm.util.JSONUtility;
import com.ibm.util.SimpleQueue;
import com.ibm.util.exception.CPSException;

/**
 * Kafka topic listener that watches a metadata topic for data-source
 * messages and forwards those belonging to this forwarder's tenant onto
 * an in-memory queue for downstream processing.
 */
public class ForwarderAddDataSourceListener extends KafkaTopicConsumeThread {

	// Tenant whose data-source metadata this listener accepts; messages for
	// other tenants are silently dropped.
	private final String tenantid;
	private final Logger logger;
	// Hand-off queue consumed elsewhere; parsed metadata objects are put here.
	private final SimpleQueue queue;

	/**
	 * @param zookeeper     ZooKeeper connect string, passed to the consumer thread
	 * @param groupid       Kafka consumer group id
	 * @param metadataTopic topic carrying data-source metadata messages
	 * @param tenantid      tenant filter; only matching messages are enqueued
	 * @param queue         destination queue for parsed metadata
	 */
	public ForwarderAddDataSourceListener(String zookeeper, String groupid, String metadataTopic, String tenantid,
			SimpleQueue queue) {
		super(zookeeper, groupid, metadataTopic);
		logger = Logger.getLogger(ForwarderAddDataSourceListener.class);
		this.tenantid = tenantid;
		this.queue = queue;
	}

	/**
	 * Parses one raw Kafka message. If it is a data-source metadata message
	 * for this listener's tenant, the parsed metadata is put on the queue.
	 * Parse and interrupt failures are logged, not propagated.
	 */
	@Override
	public void process(byte[] message) throws CPSException {
		// Decode with an explicit charset: the no-arg String(byte[]) constructor
		// uses the platform-default charset, which varies across JVMs.
		String input = new String(message, StandardCharsets.UTF_8);
		try {
			HashMap<String, String> key2Value = JSONUtility.getKeyValue(input);
			String type = key2Value.get(MessageFields.TYPE);
			if (type != null && DataSourceMetadataFactory.isDataSourceType(type)) {
				// Compare with tenantid on the left: the message may lack a
				// TENANTID field, and calling equals on that null would NPE.
				if (tenantid.equals(key2Value.get(MessageFields.TENANTID))) {
					queue.put(DataSourceMetadataFactory.parseDataSourceMetadata(JSONUtility.fromObjectString(input),
							type));
				}
			}
		} catch (InterruptedException e) {
			// Restore the interrupt flag so the enclosing consume loop can
			// still observe a shutdown request; swallowing it would lose the signal.
			Thread.currentThread().interrupt();
			logger.error(Throwables.getStackTraceAsString(e));
		} catch (CPSException e) {
			logger.error(Throwables.getStackTraceAsString(e));
		}
	}

}
