package com.mohe.shanpao.kafka;

import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Minimal demo of the Kafka "new" (java client) producer: sends 100 string
 * messages to topic "ttt" synchronously and prints the assigned offset of each.
 */
public class MyProducer {
	
	private Producer<String, String> producer = null;
	
	public static void main(String[] args) {
		
		MyProducer mp = new MyProducer();
		mp.init();
		
		try {
			for (int i = 0; i < 100; i++) {
				ProducerRecord<String, String> record =
						new ProducerRecord<String, String>("ttt", "key==" + i, "value==" + i);
				Future<RecordMetadata> future = mp.producer.send(record);
				try {
					// Blocking on get() makes each send synchronous; fine for a demo,
					// but note it serializes sends and defeats client-side batching.
					RecordMetadata metadata = future.get();
					System.out.println("offset==" + metadata.offset());
				} catch (InterruptedException e) {
					// Restore the interrupt status (never swallow it) and stop sending.
					Thread.currentThread().interrupt();
					break;
				} catch (ExecutionException e) {
					// The broker-side send failed; log and continue with the next record.
					e.printStackTrace();
				}
			}
		} finally {
			// Always close, even on failure, to release network threads and buffers.
			mp.producer.close();
		}
	}
	
	
	/**
	 * Builds the producer configuration and instantiates the KafkaProducer.
	 * Must be called before any send; {@link #producer} is null until then.
	 */
	public void init()
	{
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.91.130:9092,192.168.91.131:9092,192.168.91.133:9092");
		
		props.put("retries", 0);
		props.put("batch.size", 16384);
		props.put("linger.ms", 1);
		props.put("buffer.memory", 33554432);
		// FIX: "request.required.acks" is the legacy (pre-0.9 Scala producer) key and is
		// ignored by the new KafkaProducer; the correct key is "acks".
		// "0": don't wait for any ack; "1": ack once the leader has written the record;
		// "all" (or "-1"): ack only after all in-sync replicas have the record.
		props.put("acks", "1");
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		
		producer = new KafkaProducer<String, String>(props);
		
	}
}
