package cn.bigdata.sparkstraming.job

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.internal.Logging


import java.util.Properties

/**
 * Small utility that synchronously publishes string messages to Kafka.
 *
 * A single shared [[KafkaProducer]] is used; KafkaProducer is thread-safe,
 * so one instance per JVM is the recommended usage.
 */
object SampleLogSender extends Logging {

  // Comma-separated Kafka bootstrap servers.
  private val brokers = "centos1:9092,centos2:9092,centos3:9092"

  private val prop = new Properties()

  prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
  // Use the fully-qualified class NAME rather than the Class object: both are
  // accepted by the producer, but the String form is the documented convention
  // and survives Properties round-tripping.
  prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
  prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
  // Since sends are awaited synchronously anyway, require acknowledgement from
  // all in-sync replicas for durability at negligible extra latency.
  prop.put(ProducerConfig.ACKS_CONFIG, "all")

  private val kafkaProducer = new KafkaProducer[String, String](prop)

  /**
   * Synchronously sends `value` to `topic` and logs the resulting record metadata.
   *
   * Fix: the original passed an empty-string key (`""`), which hashes every
   * record onto a single partition. Omitting the key lets the producer's
   * partitioner spread records across partitions.
   *
   * @param topic destination Kafka topic
   * @param value message payload
   */
  def sendLog(topic: String, value: String): Unit = {
    val msg = new ProducerRecord[String, String](topic, value) // keyless record -> partitioner chooses
    // Block on the future so send failures surface to the caller as exceptions.
    val metadata = kafkaProducer.send(msg).get()
    // Use the inherited logger instead of println (the object extends Logging).
    logInfo(s"partition:${metadata.partition()}, offset:${metadata.offset()}, size:${value.length}")
  }

  def main(args: Array[String]): Unit = {
    try {
      sendLog("test", "xxxx")
    } finally {
      // Flush buffered records and release network resources before JVM exit;
      // the original leaked the producer.
      kafkaProducer.close()
    }
  }
}
