package com.haozhen.stream.homework

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/15  0:03
  */
object KafkaProducer {

  /**
    * Reads every line of `data/sample.log` with Spark (local mode) and
    * publishes each line as the value of a record on Kafka topic "topic1"
    * at broker `test1:9092`.
    */
  def main(args: Array[String]): Unit = {
    import java.util.Properties

    import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
    import org.apache.kafka.common.serialization.StringSerializer
    import org.apache.log4j.{Level, Logger}
    import org.apache.spark.rdd.RDD
    import org.apache.spark.{SparkConf, SparkContext}

    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // `.init` drops the trailing '$' that getCanonicalName yields for Scala objects.
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName.init).setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val lines: RDD[String] = sc.textFile("data/sample.log")

      // Producer configuration. Serializers are passed by class *name* so the
      // Properties object holds only strings — safer when it is serialized
      // into the executor closure below.
      val prop = new Properties()
      prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "test1:9092")
      prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
      prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

      // One producer per partition: KafkaProducer is not serializable, so it
      // must be built on the executor; sharing it across the partition avoids
      // opening a connection per record.
      lines.foreachPartition { iter =>
        val producer = new KafkaProducer[String, String](prop)
        try {
          iter.foreach { line =>
            producer.send(new ProducerRecord[String, String]("topic1", line))
          }
        } finally {
          // close() flushes buffered records; finally guarantees it runs even
          // if a send fails mid-partition.
          producer.close()
        }
      }
    } finally {
      // Was missing: release the SparkContext even when the job throws.
      sc.stop()
    }
  }
}
