package org.niit.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;


import java.util.Properties;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/*
  Connect to Kafka from Java and produce data (demo: synchronously send 100 string messages).
 */
public class KafkaProducerTest {

    /**
     * Connects to Kafka at {@code node1:9092} and synchronously produces 100
     * string messages ("1" through "100") to the {@code test} topic.
     *
     * @throws ExecutionException   if the broker fails to acknowledge a record
     * @throws InterruptedException if interrupted while waiting for an acknowledgement
     */
    public static void main(String[] args) throws ExecutionException, InterruptedException {

        // 1. Producer configuration. Use ProducerConfig constants consistently
        //    (the original mixed raw string keys with constants) to avoid typos.
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node1:9092");
        props.put(ProducerConfig.ACKS_CONFIG, "all"); // wait for all in-sync replicas to acknowledge
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true); // prevent duplicate records on producer retries
        // Kafka transports byte arrays; serialize both key and value as strings.
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // 2. Create the producer in try-with-resources so close() (which also
        //    flushes buffered records) runs even when a send fails — the
        //    original leaked the producer if an exception was thrown before
        //    reaching the explicit close() call.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {

            // 3. Produce the data. Key is null, so the partitioner spreads
            //    records across partitions.
            for (int i = 1; i <= 100; i++) {
                ProducerRecord<String, String> record =
                        new ProducerRecord<>("test", null, Integer.toString(i));
                Future<RecordMetadata> ack = producer.send(record);
                // Block until the broker acknowledges — makes each send synchronous.
                ack.get();
            }

        } // 4. Producer is closed automatically here.
    }
}
