package org.databandtech.mockmq;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.databandtech.common.Mock;
import org.databandtech.mockmq.entity.EpgVod;

import com.google.gson.Gson;

public class EpgVodKafkaProducer {

	final static String HOST = "192.168.13.52:9092"; // alternate broker: "192.168.10.60:9092"
	final static String TOPIC = "EPGVOD";
	final static int COUNT = 10;    // number of messages to send
	final static int PARTITION = 0; // partition for the explicit-partition send (commented out below)
	// t = telecom, u = unicom, m = mobile; telecom/mobile entries are repeated
	// to raise their probability when an element is picked at random
	final static String[] SYS = {"t","t","t","t","u","m","m","m"};
	final static String[] STBTYPE = {"huawei","mi","mi","mi","oppo","oppo"};
	final static String[] TERMINALTYPE = {"linux_STB","android_STB","PCClient","Ipad","iphone"};
	// final vid ranges from "vid-1" to "vid-100", which makes aggregation easy
	final static String	MEDIACODE_PRRFIX ="vid-";
	final static String[] DEFINITION = {"0","1","2","3"};

	// city names; larger cities repeated to weight them more heavily
	final static String[] AREA_CODE = {"北京","北京","北京","上海","上海","上海","广州","广州","深圳","深圳","重庆","杭州","武汉","南京","郑州","西安","成都","长沙"};
	final static String[] APPS = {"腾讯","爱奇艺","优酷"};

	/**
	 * Sends {@link #COUNT} randomly generated {@code EpgVod} JSON messages to
	 * {@link #TOPIC} synchronously, printing each payload and the
	 * broker-assigned topic/partition/offset.
	 */
	public static void main(String[] args) {

		Properties properties = new Properties();
		properties.put("bootstrap.servers", HOST);
		// acks=0:  producer does not wait for any broker ack
		// acks=1:  leader acks once it has written the record
		// acks=-1: leader acks only after all in-sync followers have replicated
		properties.put("acks", "-1");
		properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		// Gson is thread-safe and stateless for this usage; create once, not per message.
		Gson gson = new Gson();

		// try-with-resources guarantees the producer is flushed and closed
		// (previously leaked behind @SuppressWarnings("resource")).
		try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties)) {
			// -- synchronous send: send(...).get() blocks until the broker acks
			for (int i = 1; i <= COUNT; i++) {
				EpgVod vod = buildRandomVod();
				String jsonStr = gson.toJson(vod);

				// explicit-partition variant:
				// ProducerRecord<String, String> pr = new ProducerRecord<String, String>(TOPIC,PARTITION,"key"+i, msg +"--"+i);
				// default partitioner:
				ProducerRecord<String, String> pr = new ProducerRecord<>(TOPIC, jsonStr);
				try {
					RecordMetadata metadata = kafkaProducer.send(pr).get();
					System.out.println(jsonStr);
					System.out.println("TopicName : " + metadata.topic() + " Partiton : " + metadata
							.partition() + " Offset : " + metadata.offset() + "--" + jsonStr + i);
				} catch (InterruptedException e) {
					// Restore interrupt status and stop sending instead of
					// swallowing the interrupt and continuing the loop.
					Thread.currentThread().interrupt();
					e.printStackTrace();
					break;
				} catch (ExecutionException e) {
					// Broker-side/send failure for this record; report and try the next one.
					e.printStackTrace();
				}
			}
		}

	}

	/** Builds one {@code EpgVod} populated with random mock values. */
	private static EpgVod buildRandomVod() {
		EpgVod vod = new EpgVod();
		vod.setAction_type("vod_playing");
		// NOTE(review): assumes Mock.getNum(lo, hi) is inclusive on both ends — confirm;
		// if hi is exclusive, the last element of each array is never selected.
		vod.setArea_code(AREA_CODE[Mock.getNum(0, AREA_CODE.length - 1)]);
		vod.setBitrate("2k");
		vod.setCurrentplaytime("");
		vod.setDefinition(DEFINITION[Mock.getNum(0, DEFINITION.length - 1)]);
		vod.setEpg_group_id(DEFINITION[Mock.getNum(0, DEFINITION.length - 1)]);
		vod.setLog_time(System.currentTimeMillis() + "");
		vod.setMediacode(MEDIACODE_PRRFIX + Mock.getNumString(100));
		vod.setRefer_page_id(DEFINITION[Mock.getNum(0, DEFINITION.length - 1)]);
		vod.setRefer_type(DEFINITION[Mock.getNum(0, DEFINITION.length - 1)]);
		vod.setStart_time("");
		vod.setStb_id("");
		vod.setStb_ip("192.168.1." + Mock.getNumString(254));
		vod.setUser_id("uid-" + Mock.getNumString(1000));
		vod.setUser_group_id(DEFINITION[Mock.getNum(0, DEFINITION.length - 1)]);
		vod.setTerminal_type(TERMINALTYPE[Mock.getNum(0, TERMINALTYPE.length - 1)]);
		vod.setSys_id(SYS[Mock.getNum(0, SYS.length - 1)]);
		vod.setStb_type(STBTYPE[Mock.getNum(0, STBTYPE.length - 1)]);
		vod.setStb_mac("");
		return vod;
	}

}
