package demo.kafka;

import demo.utils.JsonUtils;
import demo.utils.XdcsSparkFileUtils;
import demo.vo.StatDataPoint;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.Properties;
import java.util.UUID;

/**
 * Eagerly-initialized singleton that publishes {@link StatDataPoint}s as JSON
 * to a set of Kafka topics configured in {@code kafka.properties}.
 *
 * <p>Serialization failures are logged and swallowed (best-effort delivery);
 * no exception ever propagates to the caller.
 *
 * @author mandy.hu
 */
public class XdcsSparkKafkaProducer implements Serializable {

	private static final long serialVersionUID = -952642287180856116L;

	private static final Logger logger = LoggerFactory.getLogger(XdcsSparkKafkaProducer.class);

	/** Classpath resource holding broker settings and topic names. */
	private static final String kafkaProperties = "kafka.properties";
	// NOTE(review): PRE_KEY is never referenced anywhere in this class.
	// Kept only in case external code reads it reflectively — confirm and remove.
	private static final String PRE_KEY = "xdcs.";

	/** Default topic (property "topic"). */
	private String topic;

	/** Topic for server-load metrics (property "serverLoadTopic"). */
	private String serverLoadTopic;

	/** Topic for store metrics (property "storeTopic"). */
	private String storeTopic;

	/** Topic for compass data points (property "compassDataPointTopic"). */
	private String compassDataPointTopic;

	/** Topic for log records (property "topic.log"). */
	private String logTopic;

	/** Topic consumed by the HDFS sink (property "topicHdfs"). */
	private String topicHdfs;

	private Producer<String, String> producer;

	/**
	 * Sends a raw message to the given topic, keyed by a random UUID fragment
	 * so messages spread evenly across partitions.
	 *
	 * @param topic   destination Kafka topic
	 * @param message payload to send
	 */
	public void send(String topic, String message) {
		String key = String.valueOf(UUID.randomUUID().getLeastSignificantBits());
		producer.send(new KeyedMessage<String, String>(topic, key, message));
	}

	/**
	 * Serializes {@code dataPoint} to JSON and sends it to {@code topic}.
	 * Failures are logged, never thrown.
	 */
	public void send(String topic, StatDataPoint dataPoint) {
		sendAsJson(topic, dataPoint, "XdcsSparkKafkaProducer-send json:");
	}

	/** Sends {@code dataPoint} as JSON to the log topic. Failures are logged, never thrown. */
	public void sendLog(StatDataPoint dataPoint) {
		sendAsJson(logTopic, dataPoint, "XdcsSparkKafkaProducer-send json:");
	}

	/** Sends {@code dataPoint} as JSON to the default topic. Failures are logged, never thrown. */
	public void send(StatDataPoint dataPoint) {
		sendAsJson(topic, dataPoint, "XdcsSparkKafkaProducer-send json:");
	}

	/** Sends {@code dataPoint} as JSON to the store topic. Failures are logged, never thrown. */
	public void sendStore(StatDataPoint dataPoint) {
		sendAsJson(storeTopic, dataPoint, "XdcsSparkKafkaProducer-Store-send json:");
	}

	/** Sends {@code dataPoint} as JSON to the server-load topic. Failures are logged, never thrown. */
	public void sendServerLoad(StatDataPoint dataPoint) {
		sendAsJson(serverLoadTopic, dataPoint, "XdcsSparkKafkaProducer-ServerLoad-send json:");
	}

	/** Sends {@code dataPoint} as JSON to the compass data-point topic. Failures are logged, never thrown. */
	public void sendCompassDataPoint(StatDataPoint dataPoint) {
		sendAsJson(compassDataPointTopic, dataPoint, "spark compassDataPointTopic-send json:");
	}

	/** Sends {@code dataPoint} as JSON to the HDFS topic. Failures are logged, never thrown. */
	public void send2Hdfs(StatDataPoint dataPoint) {
		sendAsJson(topicHdfs, dataPoint, "XdcsSparkKafkaProducer-send json:");
	}

	/**
	 * Shared best-effort send path: serializes the data point to JSON and
	 * forwards it to {@code targetTopic}. Any failure (serialization or send)
	 * is logged with {@code errorPrefix} and swallowed, matching the original
	 * per-method behavior.
	 *
	 * @param targetTopic destination topic (may be null if the property was absent)
	 * @param dataPoint   payload to serialize
	 * @param errorPrefix exact log-message prefix used on failure
	 */
	private void sendAsJson(String targetTopic, StatDataPoint dataPoint, String errorPrefix) {
		String message = null;
		try {
			message = JsonUtils.toJson(dataPoint);
			this.send(targetTopic, message);
		} catch (Exception e) {
			// message may still be null if serialization itself failed.
			logger.error(errorPrefix + message, e);
		}
	}

	/**
	 * Closes the underlying Kafka producer, releasing its network resources.
	 * Call once when the Spark job / JVM shuts down; the singleton must not be
	 * used afterwards.
	 */
	public void close() {
		producer.close();
	}

	/**
	 * Loads {@code kafka.properties}, resolves topic names, forces gzip
	 * compression, and opens the Kafka producer. Private: use
	 * {@link #getInstance()}.
	 */
	private XdcsSparkKafkaProducer() {

		Properties properties = XdcsSparkFileUtils.load(kafkaProperties);
		this.topic = properties.getProperty("topic");
		this.topicHdfs = properties.getProperty("topicHdfs");
		this.logTopic = properties.getProperty("topic.log");
		this.serverLoadTopic = properties.getProperty("serverLoadTopic");
		this.storeTopic = properties.getProperty("storeTopic");
		this.compassDataPointTopic = properties.getProperty("compassDataPointTopic");
		// Compress every batch; overrides anything set in the properties file.
		properties.put("compression.codec", "gzip");
		ProducerConfig config = new ProducerConfig(properties);
		this.producer = new Producer<String, String>(config);
	}

	// Eager initialization: the producer connects when the class is first loaded.
	private static XdcsSparkKafkaProducer xdcsSparkKafkaProducer = new XdcsSparkKafkaProducer();

	/** @return the process-wide shared producer instance */
	public static XdcsSparkKafkaProducer getInstance() {

		return xdcsSparkKafkaProducer;
	}
}
