package kafka_day01

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

import scala.tools.cmd.Property

object ProducerDome {

  /**
   * Demo entry point: produces 201 string messages ("value:0" .. "value:200")
   * to the "test01" topic, distributing them round-robin across 3 partitions
   * via an explicit partition index.
   *
   * NOTE(review): the object name looks like a typo for "ProducerDemo"; kept
   * unchanged so any external references still resolve.
   */
  def main(args: Array[String]): Unit = {

    // Producer configuration.
    val props: Properties = new Properties()

    // Kafka broker bootstrap addresses.
    props.setProperty("bootstrap.servers", "doit01:9092,doit02:9092,doit03:9092")
    props.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.setProperty("value.serializer", classOf[StringSerializer].getName) // either form works

    val topic = "test01"

    // Create the Kafka producer.
    val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](props)

    // try/finally guarantees the producer is closed (flushing any buffered
    // records and releasing its network threads) even if sending fails —
    // the original leaked the producer on any exception in the loop.
    try {
      for (i <- 0 to 200) {
        val part = i % 3 // explicit partition index spreads messages evenly over 3 partitions
        val record = new ProducerRecord[String, String](topic, part, "doit", "value:" + i)
        // send() is asynchronous; the returned Future is ignored here, so
        // broker-side failures are not surfaced — acceptable for a demo only.
        producer.send(record)
        Thread.sleep(10)
      }

      println("message send success")
    } finally {
      // Release resources; close() also flushes in-flight records.
      producer.close()
    }
  }
}
