package sparkStream

import java.util.Properties
import java.util.Random
import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.clients.producer.ProducerRecord

object kfkDemo { // Scala version of the Java Kafka producer demo
  def main(args: Array[String]): Unit = {
    // 1. Build the Kafka producer configuration.
    val properties = new Properties()
    // 2. Broker address and key/value serializers.
    properties.put("bootstrap.servers", "192.168.136.128:9092")
    properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    // 3. Create the producer ONCE, outside the loop. The original created and
    //    closed a new KafkaProducer on every iteration, which churns TCP
    //    connections/background threads and defeats record batching.
    val kafkaProducer = new KafkaProducer[String, String](properties)
    // Single shared RNG (no need to allocate one per message).
    val rd = new Random()
    // Product categories to sample from.
    val categoryArray = Array("连衣裙", "帆布鞋", "YSL", "耐克", "数码")

    try {
      // Push 5 records every 1 second, forever (Ctrl-C / interrupt to stop).
      while (true) {
        println("开始发送数据==========================")
        // 4. Send 5 randomly-chosen category records to topic "t15".
        for (_ <- 0 until 5) {
          // BUGFIX: use categoryArray.length as the bound — the original
          // nextInt(4) could never select the last element ("数码").
          val category = categoryArray(rd.nextInt(categoryArray.length))
          println("发送数据为" + category)
          try {
            kafkaProducer.send(new ProducerRecord[String, String]("t15", category))
          } catch {
            case e: Exception => e.printStackTrace()
          }
        }
        Thread.sleep(1000)
      }
    } catch {
      case e: InterruptedException => e.printStackTrace()
    } finally {
      // 5. Flush any buffered records and release producer resources on exit.
      kafkaProducer.close()
    }
  }
}