package job;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import utils.PropertyUtil;
import utils.SystemParam.Topic;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public final class ProducerFactory {

	/**
	 * Shared producer instance. Every topic currently uses the identical
	 * configuration, so a single lazily-created producer serves them all.
	 * Guarded by the class lock (all accessors are {@code synchronized}).
	 */
	private static Producer<String, String> commonProducer;

	/** Utility class; not instantiable. */
	private ProducerFactory() {
	}

	/**
	 * Returns the shared producer for the given topic, creating it lazily.
	 *
	 * @param topic the topic enum (configuration is currently topic-independent)
	 * @return the singleton {@code Producer}
	 */
	public static synchronized Producer<String, String> getProducer(Topic topic) {
		return producer();
	}

	/**
	 * Returns the shared producer for the given topic name, creating it lazily.
	 *
	 * @param topic the topic name (configuration is currently topic-independent)
	 * @return the singleton {@code Producer}
	 */
	public static synchronized Producer<String, String> getProducer(String topic) {
		return producer();
	}

	/** Lazily creates and returns the singleton producer (holds the class lock). */
	private static synchronized Producer<String, String> producer() {
		if (null == commonProducer) {
			commonProducer = new Producer<String, String>(new ProducerConfig(getProp()));
		}
		return commonProducer;
	}

	/**
	 * Sends a single message to the given topic.
	 *
	 * @param topic   target topic
	 * @param message message payload
	 */
	public static void send(Topic topic, String message) {
		send(topic.name(), message);
	}

	/**
	 * Sends a single message to the given topic.
	 *
	 * @param topic   target topic name
	 * @param message message payload
	 */
	public static void send(String topic, String message) {
		getProducer(topic).send(new KeyedMessage<String, String>(topic, message));
	}

	/**
	 * Sends a batch of messages to the given topic.
	 *
	 * @param topic        target topic
	 * @param messagesList message payloads
	 */
	public static void send(Topic topic, List<String> messagesList) {
		send(topic.name(), messagesList);
	}

	/**
	 * Sends a batch of messages to the given topic name.
	 * (New overload for parity with the single-message String variant.)
	 *
	 * @param topic        target topic name
	 * @param messagesList message payloads
	 */
	public static void send(String topic, List<String> messagesList) {
		// Pre-size to avoid ArrayList growth during the copy.
		List<KeyedMessage<String, String>> messages =
				new ArrayList<KeyedMessage<String, String>>(messagesList.size());
		for (String message : messagesList) {
			messages.add(new KeyedMessage<String, String>(topic, message));
		}
		getProducer(topic).send(messages);
	}

	/**
	 * Builds the producer configuration. Identical for all topics, so the
	 * former per-topic switch (which always fell through to the same default)
	 * has been removed.
	 *
	 * @return the Kafka 0.8 producer {@code Properties}
	 */
	private static Properties getProp() {
		String kafkaUrl = PropertyUtil.getProperty("kafka.url");
		Properties prop = new Properties();
		prop.put("serializer.class", "kafka.serializer.StringEncoder");
		prop.put("metadata.broker.list", kafkaUrl);
		prop.put("request.required.acks", "1");     // broker acknowledges each message
		prop.put("message.send.max.retries", "5");  // resend attempts after an error ack
		prop.put("producer.type", "sync");          // synchronous send
		prop.put("batch.num.messages", "100");      // buffered message count; only effective when producer.type=async
		prop.put("queue.buffering.max.ms", "5000"); // async buffer time; whichever of this / batch size is reached first triggers a send
		prop.put("compression.codec", "gzip");      // message compression codec
		return prop;
	}
}
