package com.lagou.no1

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object KafkaProducer {
  /**
   * Reads every line of `data/sample.log` via Spark and publishes each line
   * as a record (key = "s1") to the Kafka topic `sample1`.
   *
   * One Kafka producer is created per RDD partition (on the executor side):
   * `KafkaProducer` is not serializable and is expensive to construct, so it
   * must never be built once per record on the driver.
   */
  def main(args: Array[String]): Unit = {
    // Keep console output readable: Spark's internal logging at WARN only.
    Logger.getLogger("org").setLevel(Level.WARN)

    // Local-mode SparkContext for this sample job.
    val conf = new SparkConf()
      .setAppName(s"${this.getClass.getCanonicalName}")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Kafka broker list and destination topic.
    val brokers = "node01:9092,node02:9092,node03:9092"
    val topic = "sample1"

    // Producer configuration. Serializers are passed by class NAME so the
    // Properties object remains a plain String->String map that ships to
    // executors without surprises.
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

    // Read the input file; each element is one line of text.
    val dataRDD: RDD[String] = sc.textFile("data/sample.log")

    // ONE producer per partition (not per record): KafkaProducer owns a
    // background sender thread and TCP connections, so per-record creation
    // is both very slow and leaks resources on failure.
    dataRDD.foreachPartition { partition =>
      val producer = new KafkaProducer[String, String](prop)
      try {
        partition.foreach { line =>
          println(line)
          producer.send(new ProducerRecord[String, String](topic, "s1", line))
        }
        // send() is asynchronous; flush before closing so buffered records
        // are actually delivered.
        producer.flush()
      } finally {
        // Always release the producer, even if a send throws.
        producer.close()
      }
    }

    // Shut the SparkContext down cleanly.
    sc.stop()
  }
}
