package dyyx;

import java.time.LocalDateTime;
import java.util.Date;
import java.util.Properties;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class ProducerTest {

	/** Monotonic per-run sequence number embedded in every record key. */
	private static final AtomicLong seq = new AtomicLong(0);

	/**
	 * Sends a batch of timestamped test messages to {@code KafkaUtil.TOPIC} and
	 * prints the broker-acknowledged metadata (topicPartition@offset) for each.
	 *
	 * @param args optional; {@code args[0]} overrides the number of messages to
	 *             send (defaults to 100)
	 * @throws Exception if a send fails or its acknowledgement cannot be awaited
	 */
	public static void main(String[] args) throws Exception {

		String server = KafkaUtil.SERVERS;

		Properties props = new Properties();
		props.put("bootstrap.servers", server);
		// acks=0        : fire-and-forget, return without waiting for the broker
		// acks=1        : leader replica write is sufficient
		// acks=all / -1 : all in-sync replicas must acknowledge
		props.put("acks", "1");
		props.put("retries", 0);
		props.put("batch.size", 16384);
		props.put("linger.ms", 1);
		props.put("buffer.memory", 33554432);
		props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
		props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

		// Allow the message count to be overridden from the command line;
		// default matches the previous hard-coded value.
		int num = args.length > 0 ? Integer.parseInt(args[0]) : 100;

		// try-with-resources guarantees the producer is closed (flushing any
		// buffered records) even if send() or get() throws.
		try (Producer<String, String> producer = new KafkaProducer<String, String>(props)) {
			for (int i = 0; i < num; i++) {
				// java.time over legacy java.util.Date: immutable, ISO-8601 toString.
				LocalDateTime now = LocalDateTime.now();
				String key = System.currentTimeMillis() + "_" + seq.getAndIncrement() + "_" + now;
				String value = key + "," + now;
				// send() is asynchronous; get() blocks until the broker responds so
				// the per-record RecordMetadata can be printed in order.
				Future<RecordMetadata> f = producer.send(
						new ProducerRecord<String, String>(KafkaUtil.TOPIC, key, value));
				System.out.println(f.get() + ",key=" + key + ",value=" + value);
			}
		}
	}

}
