package com.chenjl.producer;

import java.util.Properties;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.chenjl.Constants;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
/**
 * Kafka producer demo using the old (pre-0.9) Scala producer client API.
 * <p>
 * The old producer connects directly to the broker list (no ZooKeeper needed);
 * the broker must advertise a reachable address, e.g.
 * {@code advertised.host.name=192.168.245.129}.
 * <p>
 * Sends 10 keyed string messages to {@link Constants#TOPIC_NAME}, pausing
 * 100 ms between sends, then closes the producer.
 *
 * 2016-9-12 15:20:57
 * @author chenjinlong
 */
@SuppressWarnings("deprecation")
public class Producer01 {
	private static final Logger logger = LoggerFactory.getLogger(Producer01.class);

	public static void main(String[] args) {
		Properties props = new Properties();
		props.put("metadata.broker.list", "192.168.245.129:9092");
		// Serializers used when handing message values/keys to the broker.
		props.put("serializer.class", "kafka.serializer.StringEncoder");
		props.put("key.serializer.class", "kafka.serializer.StringEncoder");
		// request.required.acks:
		//   0  = fire-and-forget, do not wait for the broker
		//   1  = return once the leader has the data; data is lost if the
		//        leader dies before replication completes
		//   -1 = return once all in-sync replicas have the data (most durable)
		props.put("request.required.acks", "1");

		ProducerConfig producerConfig = new ProducerConfig(props);
		Producer<String, String> producer = new Producer<String, String>(producerConfig);

		logger.info("send begin ");
		try {
			for (int i = 1; i <= 10; i++) {
				String key = "1key~" + i;
				String value = "1value#" + i;

				KeyedMessage<String, String> keyedMessage =
						new KeyedMessage<String, String>(Constants.TOPIC_NAME, key, value);
				producer.send(keyedMessage);

				// Parameterized logging avoids eager string concatenation.
				logger.info("发送完成 , {}", i);
				try {
					Thread.sleep(100);
				}
				catch (InterruptedException e) {
					// Restore the interrupt flag and stop sending instead of
					// swallowing the interruption (printStackTrace hid it before).
					Thread.currentThread().interrupt();
					break;
				}
			}
		}
		finally {
			// Always release the producer's network resources, even if a send fails.
			producer.close();
		}
	}
}