/*
 * Test terminal target: Kafka producer whose messages are consumed into an HBase database.
 * Class: producer — generates test data.
 *
 * Author: Ricky Lin
 * Date: 2017-08-01
 *
 */

package com.kafka.hbase;
import java.util.Properties;

import org.apache.log4j.PropertyConfigurator;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class KafkaProducer {

    /**
     * Entry point: configures log4j from a file under the working directory,
     * builds a producer from {@link KafkaProperties}, and sends ten string
     * messages ("Ricky 0" .. "Ricky 9") to the configured topic.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // Resolve the log4j config relative to the current working directory.
        // Forward slashes are accepted on both Windows and Unix, unlike the
        // hard-coded "\\" separators this previously used.
        String workingDir = System.getProperty("user.dir");
        System.out.println(workingDir);
        String log4jConfPath = workingDir + "/src/main/resources/log4j.properties";
        PropertyConfigurator.configure(log4jConfPath);

        Properties props = new Properties();
        props.put("metadata.broker.list", KafkaProperties.metadataBrokerList);
        // StringEncoder sends String payloads. Alternatives (old producer API):
        //   "kafka.serializer.DefaultEncoder"  — raw byte[] payloads.
        //   "key.serializer.class" can be set separately (e.g. to StringEncoder);
        //   if omitted, keys use the same encoder as values. The producer always
        //   sends key/value pairs.
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        // props.put("key.serializer.class", "kafka.serializer.StringEncoder");

        ProducerConfig config = new ProducerConfig(props);
        Producer<String, String> producer = new Producer<String, String>(config);
        try {
            for (int i = 0; i < 10; i++) {
                producer.send(new KeyedMessage<String, String>(KafkaProperties.topic, "Ricky " + i));
            }
        } finally {
            // The old-API Producer is not AutoCloseable; close explicitly so the
            // network connections and sender threads are released even if send() throws.
            producer.close();
        }
    }
}