import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.util.Properties

// Replays the contents of data/sample.log into a Kafka topic, one message per line.
object Producer {

  /**
   * Reads `data/sample.log` with Spark and sends each line as a Kafka message
   * to the configured topic, throttled to one message every 2 seconds to
   * simulate a live stream for downstream consumers.
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's noisy INFO logging for this demo.
    Logger.getLogger("org").setLevel(Level.ERROR)

    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      // One Kafka message per file line.
      val logRDD: RDD[String] = sc.textFile("data/sample.log")

      // Kafka producer configuration.
      val brokers = "linux121:9092"
      val topic = "topic01"
      val prop = new Properties()
      prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
      // Use the class *names* (Strings) rather than Class objects: Properties is a
      // Hashtable[AnyRef, AnyRef], and String values are the conventional form for
      // Kafka configs that get closed over and shipped to executors.
      prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
      prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

      logRDD.foreachPartition { lines =>
        // KafkaProducer is not serializable, so it must be created here on the
        // executor — one producer per partition, closed when the partition is done.
        val producer = new KafkaProducer[String, String](prop)
        try {
          lines.foreach { line =>
            // Fire-and-forget send; close() below flushes any buffered records.
            producer.send(new ProducerRecord[String, String](topic, line))
            println(line)
            // Deliberate throttle: simulate a live stream at ~1 message / 2s.
            Thread.sleep(2000)
          }
        } finally {
          // Always flush and release the producer's network threads, even if a
          // send fails or the sleep is interrupted — otherwise they leak.
          producer.close()
        }
      }
    } finally {
      sc.stop()
    }
  }
}
