package org.shj.spark.application;

import java.util.Properties;
import java.util.Random;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;

public class DataProduceForKafka extends Thread {

	/** Channel names used to compose the random test messages. */
	static String[] channelNames = new String[] {
			"Spark", "Scala", "Kafka", "Hadoop", "Hive",
			"Impala", "Hbase"
	};

	/** User actions paired with a channel in each message. */
	static String[] actions = new String[] {"View", "Favorite"};

	/** Kafka topic the generated messages are published to. */
	private String topic;
	private Producer<Integer, String> producerForKafka;

	private Random random = new Random();

	/**
	 * Creates a producer thread that publishes random "channel action"
	 * messages to the given Kafka topic.
	 *
	 * @param topic destination Kafka topic
	 */
	public DataProduceForKafka(String topic) {
		this.topic = topic;
		Properties conf = new Properties();
		conf.put("metadata.broker.list", "ubuntu3:9092,ubuntu4:9092,ubuntu5:9092");
		conf.put("serializer.class", "kafka.serializer.StringEncoder");

		producerForKafka = new Producer<Integer, String>(new ProducerConfig(conf));
	}

	/**
	 * Sends 2000 random messages of the form {@code "<channel> <action>"},
	 * pausing 10 seconds after every 500 messages, then closes the producer.
	 */
	@Override
	public void run() {
		try {
			for (int i = 0; i < 2000; i++) {
				if (i > 0 && i % 500 == 0) {
					try {
						Thread.sleep(10000);
					} catch (InterruptedException e) {
						// Restore the interrupt status and stop producing instead
						// of silently swallowing the interruption (original bug).
						Thread.currentThread().interrupt();
						return;
					}
				}
				String channel = channelNames[random.nextInt(channelNames.length)];
				// Pick the action independently. The original reused the channel
				// index (actions[ind % 2]), which deterministically tied each
				// channel to a single fixed action.
				String action = actions[random.nextInt(actions.length)];
				String msg = channel + " " + action;
				producerForKafka.send(new KeyedMessage<Integer, String>(topic, msg));
			}
		} finally {
			// Release the producer's network resources; the original leaked them.
			producerForKafka.close();
		}
	}

	public static void main(String[] args) {
		new DataProduceForKafka("shjtestTopic").start();
	}

}
