package com.shihb;

import java.util.Properties;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * Description: Kafka producer API demo.
 * Version: 1.0.0
 *
 * @author shihb
 * @date 2020/6/7 12:23
 */
public class ProducerDemo {

  public static void main(String[] args) throws ExecutionException, InterruptedException {

    // Producer configuration
    Properties props = new Properties();
    // Kafka cluster bootstrap servers
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "node05:9092");
    // Kafka ack mode: 0 = no ack, 1 = leader ack only, -1/"all" = all in-sync replicas ack
    props.put(ProducerConfig.ACKS_CONFIG, "all");
    // Other tunables (left at defaults here):
    //   retries        — ProducerConfig.RETRIES_CONFIG
    //   linger         — ProducerConfig.LINGER_MS_CONFIG
    //   batch size     — ProducerConfig.BATCH_SIZE_CONFIG
    //   buffer memory  — ProducerConfig.BUFFER_MEMORY_CONFIG
    // Key/value serializers — use the ProducerConfig constants instead of raw
    // string keys so typos are caught at compile time and the style matches
    // the other settings above.
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    // ****.log message layout: offset(8B) + size(4B) + message(Record)
    // try-with-resources guarantees close() runs (flushing any buffered records)
    // even if send() throws — the original only closed on the happy path.
    try (Producer<String, String> producer = new KafkaProducer<>(props)) {
      for (int i = 0; i < 10; i++) {
        // Build the record: topic "TEST", partition 0, key and value both set to i
        ProducerRecord<String, String> record =
            new ProducerRecord<>("TEST", 0, Integer.toString(i), Integer.toString(i));
        // Synchronous send alternative:
        // producer.send(record).get();
        // Asynchronous send with a callback. Exactly one of (metadata, exception)
        // is meaningful: check the exception first — the original printed
        // meta.toString() unconditionally, silently discarding send failures.
        producer.send(record, (meta, e) -> {
          if (e != null) {
            System.err.println("Send failed: " + e);
          } else {
            System.out.println(meta.toString());
          }
        });
      }
    }
  }
}
