package com.lagou.sparkstreaming
/*
  Creates a Kafka producer, reads the sample.log file, and sends its lines to topic1.
 */
import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object KafkaProducer {

  /**
   * Reads `data/sample.log` with Spark and publishes each line as a record
   * to the Kafka topic "topic1".
   *
   * A producer is created inside `foreachPartition` (one per partition) because
   * KafkaProducer is not serializable and cannot be created on the driver and
   * shipped to executors.
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging.
    Logger.getLogger("org").setLevel(Level.ERROR)

    // .init drops the trailing '$' that getCanonicalName appends for Scala objects.
    val conf: SparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.init)
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      // Read the source file; one RDD element per line.
      val lines: RDD[String] = sc.textFile("data/sample.log")

      // Kafka producer configuration. Passing classOf[...] is accepted by the
      // producer config for the serializer settings.
      val prop = new Properties()
      prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "server1:9092")
      prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
      prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])

      // Send every line to topic1, using one producer per partition.
      lines.foreachPartition { iter =>
        val producer = new KafkaProducer[String, String](prop)
        try {
          iter.foreach { line =>
            producer.send(new ProducerRecord[String, String]("topic1", line))
          }
        } finally {
          // close() blocks until previously sent records are flushed, and runs
          // even if a send throws (the original leaked the producer on failure).
          producer.close()
        }
      }
    } finally {
      // Always release the SparkContext; the original never called stop(),
      // leaking the context on both the success and failure paths.
      sc.stop()
    }
  }

}
