package cn.lagou.spark.work1

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * 读取文件中数据，写入kafka
 */
/**
 * Reads lines from a local file with Spark and publishes each line to a
 * Kafka topic.
 *
 * Prerequisite: create the target topics on the broker first, e.g.
 *   kafka-topics.sh --zookeeper linux121:2181/myKafka --create --topic mytopic1 --partitions 1 --replication-factor 1
 *   kafka-topics.sh --zookeeper linux121:2181/myKafka --create --topic mytopic2 --partitions 1 --replication-factor 1
 */
object FileSendToKafka {

  // Destination topic and input file, named instead of inlined in main.
  private val Topic = "mytopic1"
  private val InputPath = "./data/sample.log"

  def main(args: Array[String]): Unit = {
    // Initialize the SparkContext. `.init` drops the trailing '$' that the
    // canonical name of a Scala object carries.
    val conf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.init)
      .setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("warn")

    // Kafka producer configuration (broker address + String serializers).
    val prop = getKafkaProducerParameters()

    // Read the input file. One producer is created per partition because
    // KafkaProducer is not serializable and must be built on the executor.
    val lines: RDD[String] = sc.textFile(InputPath)
    lines.foreachPartition { iter =>
      val producer = new KafkaProducer[String, String](prop)
      try {
        // Send every line of this partition as a value-only record.
        iter.foreach { line =>
          producer.send(new ProducerRecord[String, String](Topic, line))
        }
      } finally {
        // Close even if a send throws, so buffered records are flushed and
        // the producer's I/O thread and sockets are released.
        producer.close()
      }
    }
    sc.stop()
  }

  /**
   * Builds the Kafka producer configuration: bootstrap server plus String
   * key/value serializers.
   *
   * @return a [[java.util.Properties]] suitable for constructing a
   *         `KafkaProducer[String, String]`
   */
  def getKafkaProducerParameters(): Properties = {
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux121:9092")
    // ProducerConfig accepts either the class name or the Class object here.
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop
  }
}
