
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object KafkaProducer  {
  /**
   * Reads lines from a local text file with Spark and publishes each line to a
   * Kafka topic, using the line's position (0-based) as the message key.
   *
   * @param args optional; args(0) overrides the default input file path
   */
  def main(args: Array[String]): Unit = {
    // 1. Create the SparkContext. `.init` drops the trailing '$' that the
    // compiler appends to a Scala object's class name.
    val conf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.init)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Kafka connection parameters
      val brokers = "linux128:9092,linux129:9092,linux130:9092"
      val topic1 = "spark_topic01"

      val prop = new Properties()
      prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
      prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
      prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])

      // Input path: allow a command-line override instead of only the hard-coded default.
      val inputPath =
        if (args.nonEmpty) args(0)
        else "file:///D:\\BaiduNetdiskDownload\\lagoubigdata\\fourthPhrase\\大数据正式班第四阶段模块一\\scala编程\\0-讲义和代码\\代码\\sparkPartTwo\\data\\sample.log"

      // Read the RDD data and send it to Kafka.
      val rddData: RDD[String] = sc.textFile(inputPath)

      // Echo the lines being sent (runs on executors; with local[*] this is the driver console).
      rddData.foreach(println)

      val producer = new KafkaProducer[String, String](prop)
      try {
        // collect() brings the data to the driver so the (non-serializable)
        // producer is only used here; zipWithIndex replaces the mutable counter.
        rddData.collect().zipWithIndex.foreach { case (line, idx) =>
          producer.send(new ProducerRecord[String, String](topic1, idx.toString, line))
        }
      } finally {
        producer.close() // flushes any buffered records before releasing resources
      }
    } finally {
      sc.stop() // always release the Spark resources
    }
  }
}