package myapps2;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

import java.time.LocalDateTime;
import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * Minimal Kafka producer demo: asynchronously sends one string message to the
 * {@code KAFKA_TEST} topic and prints the assigned partition and offset on success.
 *
 * <p>Run with no arguments; the broker address is hard-coded below.
 */
public class SimpleProducer {
	public static void main(String[] args) {
		Properties props = new Properties();
		props.put("bootstrap.servers", "192.168.0.150:9092");
		// "all": wait for the full in-sync replica set to acknowledge each record.
		props.put("acks", "all");
		props.put("retries", 10);
		props.put("batch.size", 16384);
		props.put("key.serializer", StringSerializer.class.getName());
		props.put("value.serializer", StringSerializer.class.getName());

		int messageNo = 1;
		String topic = "KAFKA_TEST";
		String messageStr = "你好，这是第" + messageNo + "条数据-" + LocalDateTime.now();

		// try-with-resources guarantees close() — which flushes any buffered
		// records — even if send() throws. (A synchronous send would instead be
		// producer.send(record).get(), blocking for the RecordMetadata.)
		try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
			ProducerRecord<String, String> record = new ProducerRecord<>(topic, "msg5", messageStr);
			// Asynchronous send: the callback runs on the producer's I/O thread
			// once the broker responds or the send fails permanently.
			producer.send(record, (RecordMetadata metadata, Exception exception) -> {
				if (exception == null) {
					System.out.println(metadata.partition() + "::" + metadata.offset());
				} else {
					// Previously failures were silently swallowed; report them so a
					// broken broker/topic configuration is visible.
					System.err.println("Send failed for record with key msg5");
					exception.printStackTrace();
				}
			});
		}
	}
}
