import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object KafkaProducer {
  /**
   * Reads `data/sample.log`, strips the `<<<!>>>` delimiter from every line,
   * reformats each line's comma-separated fields as a `" | "`-joined string,
   * and publishes each record to the Kafka topic `lagou_topic01`.
   */
  def main(args: Array[String]): Unit = {

    // Build the SparkContext. `.init` drops the trailing '$' from the
    // object's canonical class name so the Spark app name reads cleanly.
    val sparkConf = new SparkConf().setAppName(this.getClass.getCanonicalName.init).setMaster("local[*]")
    sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    sparkConf.registerKryoClasses(Array(
      classOf[Array[org.apache.kafka.clients.producer.ProducerRecord[String, String]]]
    ))
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")

    // Kafka connection parameters.
    val brokers = "linux121:9092,linux122:9092,linux123:9092"
    val topic = "lagou_topic01"

    // Producer configuration. Use the serializer class *names* so the
    // Properties object only ever holds String values — java.util.Properties
    // is specified as a String-to-String table, and Kafka accepts either form.
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer].getName)

    // Strip the "<<<!>>>" record delimiter from every input line.
    val text: RDD[String] = sc.textFile("data/sample.log").map(str => str.replaceAll("<<<!>>>", ""))

    // KafkaProducer is not serializable, so it must be created inside
    // foreachPartition (one producer per partition) rather than on the driver.
    text.foreachPartition(iter => {
      val producer = new KafkaProducer[String, String](prop)
      try {
        iter.foreach(line => {
          // Drop empty fields and join the remaining ones with " | ".
          val msg: ProducerRecord[String, String] =
            new ProducerRecord[String, String](topic, line.split(",").filter(_.nonEmpty).mkString(" | "))
          println(msg)
          producer.send(msg)
        })
      } finally {
        // close() flushes buffered records; the finally block guarantees the
        // producer is released even if a send or the iteration throws.
        producer.close()
      }
    })

    sc.stop()
  }
}