package com.atguigu.spark.util.kafka

import com.atguigu.spark.util.util.PropUtil
import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata}

import java.util.Properties
import java.util.concurrent.TimeUnit
import scala.collection.mutable.ArrayBuffer
import scala.util.Random

/**
 * Standalone driver that continuously generates random ad-click records and
 * publishes them to the Kafka topic configured under `kafka.topic`.
 *
 * Record format (space-separated): `timestamp area city userid adid`
 */
object Kafka_Producer {

  /**
   * Builds a random batch of ad-click records.
   *
   * NOTE: the batch size is `Random.nextInt(20)`, i.e. 0 to 19 records —
   * an empty batch is possible (kept as-is; presumably intentional for
   * simulating quiet periods — confirm if a non-empty batch is required).
   *
   * @return array of records, each formatted as "timestamp area city userid adid"
   */
  def getRandomData(): Array[String] = {
    val buffer: ArrayBuffer[String] = ArrayBuffer[String]()
    val areaArray: Array[String] = Array("华东", "华南", "华北", "西部")
    val cityArray: Array[String] = Array("北京", "上海", "杭州", "山东")

    for (_ <- 1 to Random.nextInt(20)) {
      val ts: Long = System.currentTimeMillis()
      // Index by the arrays' own lengths instead of a hardcoded 4, so the
      // arrays can be extended without introducing an out-of-bounds bug.
      val area: String = areaArray(Random.nextInt(areaArray.length))
      val city: String = cityArray(Random.nextInt(cityArray.length))
      // user/ad ids are in the range [1, 6]
      val userid: Int = Random.nextInt(6) + 1
      val adid: Int = Random.nextInt(6) + 1
      buffer.append(s"$ts $area $city $userid $adid")
    }
    buffer.toArray
  }

  /**
   * Entry point: creates a String/String KafkaProducer and, forever, sends a
   * random batch of records every 2 seconds. Runs until the process is killed;
   * a shutdown hook flushes and closes the producer on termination.
   */
  def main(args: Array[String]): Unit = {
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, PropUtil("kafka.brokers"))
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

    val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](prop)
    // Flush buffered records and release network resources when the JVM exits;
    // without this, records still in the producer's buffer are lost.
    sys.addShutdownHook(producer.close())

    // The topic name is loop-invariant: resolve it once instead of on every send.
    val topic: String = PropUtil("kafka.topic")

    while (true) {
      for (line <- getRandomData()) {
        // Record format: timestamp area city userid adid
        producer.send(new ProducerRecord[String, String](topic, line), new Callback {
          override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit = {
            // Surface delivery failures instead of silently swallowing them.
            if (exception != null) {
              System.err.println(s"Failed to send record to topic $topic: ${exception.getMessage}")
            }
          }
        })
      }
      TimeUnit.SECONDS.sleep(2)
    }
  }
}
