package com.cnebula.dataprocess.hive.job.producer;

import javax.annotation.Resource;

import org.apache.rocketmq.spring.starter.core.RocketMQTemplate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import com.cnebula.dataprocess.common.hive.HiveDataPassJobResult;
import com.cnebula.dataprocess.common.hive.HiveLoadDataJobResult;
import com.cnebula.dataprocess.common.hive.HivePurgeJobResult;
import com.google.gson.Gson;

/**
 * Publishes Hive job result messages to their respective RocketMQ topics.
 *
 * <p>Each public send method logs a banner, the JSON form of the payload, and a
 * closing rule at INFO level, then forwards the payload via
 * {@link RocketMQTemplate#convertAndSend(String, Object)}. The three methods share
 * one private helper so the topic/banner pairs live in a single place.
 */
@Component
public class HiveJobProducer {

	private static final Logger log = LoggerFactory.getLogger(HiveJobProducer.class);

	// Gson is thread-safe and stateless as used here, so one shared instance suffices.
	private static final Gson GSON = new Gson();

	// Destination topics for each Hive job type. NOTE(review): topic names must match
	// the consumers' subscriptions — confirm against the consumer configuration.
	private static final String LOAD_DATA_TOPIC = "hive-loaddata-job-topic";
	private static final String DATA_PASS_TOPIC = "hive-datapass-job-topic";
	private static final String PURGE_TOPIC = "hive-purge-job-topic";

	// Closing rule shared by all three banners (byte-identical to the original output).
	private static final String FOOTER =
			"=====================================================================================";

	@Resource
	private RocketMQTemplate rocketMQTemplate;

	/**
	 * Sends a Hive load-data job result to {@value #LOAD_DATA_TOPIC}.
	 *
	 * @param msg the load job result payload; serialized to JSON only for logging
	 */
	public void sendLoadJobMessage(HiveLoadDataJobResult msg) {
		send(LOAD_DATA_TOPIC,
				"===========================send hive load job message================================",
				msg);
	}

	/**
	 * Sends a Hive data-pass job result to {@value #DATA_PASS_TOPIC}.
	 *
	 * @param msg the data-pass job result payload; serialized to JSON only for logging
	 */
	public void sendDataPassJobMessage(HiveDataPassJobResult msg) {
		send(DATA_PASS_TOPIC,
				"===========================send hive data pass job message===========================",
				msg);
	}

	/**
	 * Sends a Hive purge job result to {@value #PURGE_TOPIC}.
	 *
	 * @param msg the purge job result payload; serialized to JSON only for logging
	 */
	public void sendPurgeJobMessage(HivePurgeJobResult msg) {
		send(PURGE_TOPIC,
				"===========================send hive purge job message===============================",
				msg);
	}

	/**
	 * Logs the banner/payload/footer and publishes {@code msg} to {@code topic}.
	 * JSON serialization is skipped entirely when INFO logging is disabled,
	 * avoiding wasted work on hot paths.
	 */
	private void send(String topic, String banner, Object msg) {
		if (log.isInfoEnabled()) {
			log.info(banner);
			log.info(GSON.toJson(msg));
		}
		rocketMQTemplate.convertAndSend(topic, msg);
		if (log.isInfoEnabled()) {
			log.info(FOOTER);
		}
	}
}
