package com.hph.springcloudstreamkafkademo.raw.api;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * Demonstrates the raw Kafka producer API (used for Kafka API testing).
 *
 * <p>Sends a single record to topic {@code hph} on a local broker and blocks
 * until the broker acknowledges it.
 *
 * @author HPH
 * @date 2019/7/27
 */
public class KafkaProducerDemo {

    public static void main(String[] args) throws ExecutionException, InterruptedException {

        // Minimal producer configuration: broker address plus key/value
        // serializers. Capacity 4 avoids an immediate resize for 3 entries
        // (the original new HashMap<>(2) resized on the second put).
        Map<String, Object> config = new HashMap<>(4);
        config.put("bootstrap.servers", "localhost:9092");
        config.put("key.serializer", StringSerializer.class);
        config.put("value.serializer", StringSerializer.class);

        // try-with-resources closes the producer on exit, flushing any
        // buffered records and releasing its network thread/connections.
        // The original leaked the producer by never calling close().
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(config)) {

            // Record destined for topic "hph", partition 0, with an explicit
            // timestamp, key "message-test" and the original test payload.
            ProducerRecord<String, String> producerRecord = new ProducerRecord<>("hph",
                    0, System.currentTimeMillis(), "message-test", "测试");

            // send() is asynchronous and returns a Future; get() blocks until
            // the broker acknowledges the write (or throws on failure), so the
            // demo does not exit before the record is actually delivered.
            Future<RecordMetadata> metadataFuture = kafkaProducer.send(producerRecord);
            metadataFuture.get();
        }
    }
}
