package com.example.sparkkafka

import java.util.Properties

import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata}
import org.apache.kafka.common.serialization.StringSerializer

object KafkaProducer {

  /**
   * Demo entry point: publishes one million sequential messages
   * (key == value == the loop index as a string) to a single Kafka topic,
   * logging delivery metadata from the producer callback.
   *
   * NOTE(review): this object shares its simple name with the imported
   * `org.apache.kafka.clients.producer.KafkaProducer` class. It compiles
   * because the object occupies the term namespace while `new KafkaProducer[...]`
   * resolves in the type namespace, but it is confusing — consider renaming
   * the object (e.g. `KafkaProducerDemo`) in a follow-up, since the object
   * name is part of the public interface and cannot be changed here.
   */
  def main(args: Array[String]): Unit = {
    // Kafka connection / serialization configuration.
    val brokers = "node1:9092,node2:9092,node3:9092"
    val topic = "topic1"
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])

    val producer = new KafkaProducer[String, String](prop)

    // The callback is stateless, so build it once instead of allocating a
    // fresh instance for each of the 1,000,000 messages.
    val callback: Callback = new Callback {
      override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit =
        if (exception == null)
          // Successful delivery: dump the record metadata reported by the broker.
          println("主题：" + metadata.topic + "\n" + "分区：" +
            metadata.partition + "\n" + "偏移量：" + metadata.offset + "\n" +
            "序列化的key字节：" + metadata.serializedKeySize + "\n" + "序列化的value字节："
            + metadata.serializedValueSize + "\n" + "时间戳："
            + metadata.timestamp)
        else
          println("有异常：" + exception.getMessage)
    }

    // Close the producer even if the send loop throws, so buffered records
    // are flushed and the producer's I/O threads are released (was a leak:
    // close() previously ran only on the happy path).
    try {
      for (i <- 1 to 1000000) {
        val msg = new ProducerRecord[String, String](topic, i.toString, i.toString)
        // Asynchronous send; delivery outcome is reported via the callback.
        producer.send(msg, callback)
        println(s"i = $i")
        // Throttle to ~10 messages/second (demo pacing only).
        Thread.sleep(100)
      }
    } finally {
      producer.close()
    }
  }

}
