package com.newland.nxsms.utils;

import java.util.Properties;

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;


/**
 * KafkaProducer
 * @author zhxf
 *
 */
public class KafkaProducer {

	/** Default "acks" mode used when {@link #setAcks(String)} is never called. */
	private static final String DEFAULT_ACKS = "1";

	/** Comma-separated {@code host:port} list passed as {@code bootstrap.servers}. */
	String brokerList;
	/** Requested "acks" setting ("0", "1" or "all"); may be null until {@link #setAcks(String)}. */
	String acksType;
	/** Underlying Kafka client; non-null only after {@link #connect()} succeeds. */
	Producer<String, String> producer;

	/**
	 * Creates a producer wrapper for the given broker list.
	 *
	 * @param brokerList comma-separated {@code host:port} broker addresses
	 */
	public KafkaProducer(String brokerList) {
		this.brokerList = brokerList;
	}

	/**
	 * Sets the producer acknowledgement mode.
	 *
	 * @param acksType "0", "1" or "all" (see the Kafka producer {@code acks} config)
	 */
	public void setAcks(String acksType) {
		this.acksType = acksType;
	}

	/**
	 * Instantiates the underlying Kafka producer. Must be called before
	 * {@link #send(String, String)}.
	 */
	public void connect() {
		producer = new org.apache.kafka.clients.producer.KafkaProducer<String, String>(getProducerProps());
	}

	/**
	 * Sends a message asynchronously (fire-and-forget; the returned Future is
	 * intentionally ignored, matching the original contract).
	 *
	 * @param topic target topic
	 * @param msg   message body (record key is left null)
	 * @throws IllegalStateException if {@link #connect()} has not been called
	 */
	public void send(String topic, String msg) {
		if (producer == null) {
			throw new IllegalStateException("Producer not connected; call connect() first");
		}
		producer.send(new ProducerRecord<String, String>(topic, msg));
	}

	/**
	 * Builds the configuration for the new (Java) Kafka producer API.
	 *
	 * <p>NOTE(review): the previous version also set the legacy Scala-producer keys
	 * {@code producer.type}, {@code batch.num.messages}, {@code queue.buffering.max.ms},
	 * {@code queue.buffering.max.messages} and {@code queue.enqueue.timeout.ms}.
	 * The Java producer does not recognize those keys (it logs "unknown configuration"
	 * warnings and ignores them), so they have been removed; the Java producer is
	 * always asynchronous and batching is governed by {@code batch.size}/{@code linger.ms}.
	 *
	 * @return properties for {@code org.apache.kafka.clients.producer.KafkaProducer}
	 */
	private Properties getProducerProps() {
		Properties props = new Properties();
		props.put("bootstrap.servers", brokerList);
		// Properties is a Hashtable: a null value would throw NPE, so fall back
		// to a sane default when setAcks() was never called.
		props.put("acks", acksType != null ? acksType : DEFAULT_ACKS);
		props.put("compression.type", "gzip");
		props.put("buffer.memory", 33554432);
		props.put("retries", 0);
		props.put("linger.ms", 1);
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		return props;
	}

	/** Flushes buffered records; a no-op if {@link #connect()} was never called. */
	public void flush() {
		if (producer != null) {
			producer.flush();
		}
	}

	/** Closes the underlying producer; a no-op if {@link #connect()} was never called. */
	public void close() {
		if (producer != null) {
			producer.close();
		}
	}

	/**
	 * Ad-hoc manual test entry point. The previous commented-out experiments
	 * (which referenced classes not imported here) were removed as dead code.
	 *
	 * @param args expected: brokerList topic message [count]
	 */
	public static void main(String[] args) {
		if (args.length < 3) {
			System.out.println("usage: KafkaProducer <brokerList> <topic> <message> [count]");
			return;
		}
		KafkaProducer kafkaProducer = new KafkaProducer(args[0]);
		kafkaProducer.connect();
		int count = args.length > 3 ? Integer.parseInt(args[3]) : 1;
		for (int i = 0; i < count; i++) {
			kafkaProducer.send(args[1], args[2]);
		}
		kafkaProducer.flush();
		kafkaProducer.close();
	}
}
