package com.lagou.spark.part01

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * @author: yehw
 * @date: 2020/11/12 18:09
 * @description: Homework #1 — Kafka producer. Reads a local log file with Spark,
 *               normalizes each line (replaces "," with "|", strips the
 *               "<<<!>>>" marker) and publishes every line to topic "topicB".
 */
object KafkaProducer {
  def main(args: Array[String]): Unit = {
    // Method-local import: this is a plain batch job, so a SparkContext is
    // enough — the original built a StreamingContext it never started.
    import org.apache.spark.SparkContext

    println("测试启动成功")
    Logger.getLogger("org").setLevel(Level.ERROR)

    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName).setMaster("local[*]")
    val sc = new SparkContext(conf)
    try {
      // Read and normalize: field separator "," -> "|", drop the record marker.
      val words = sc.textFile("data/spark02/sample.log").map { line =>
        line.replace(",", "|").replace("<<<!>>>", "")
      }

      // Create ONE producer per partition (not one per record, which leaked a
      // producer for every line) and close it deterministically when done.
      words.foreachPartition { records =>
        val brokers = "linux121:9092,linux122:9092,linux123:9092"
        val topic = "topicB"
        val prop = new Properties()
        prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
        prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
        prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
        // Fully qualified: the enclosing `object KafkaProducer` shadows the
        // imported class of the same name, so a bare `new KafkaProducer`
        // would not resolve to the Kafka client class.
        val producer =
          new org.apache.kafka.clients.producer.KafkaProducer[String, String](prop)
        try {
          records.foreach { word =>
            producer.send(new ProducerRecord[String, String](topic, word, word))
            println(s"消息发送成功$word")
          }
          // send() is asynchronous — flush so buffered records are delivered
          // before the producer is closed.
          producer.flush()
        } finally {
          producer.close()
        }
      }
    } finally {
      sc.stop()
    }
  }
}