package com.lagou.homework.sparkstream

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object KafkaProducer {
    // NOTE(review): this object's name shadows the imported
    // org.apache.kafka.clients.producer.KafkaProducer *term*; `new KafkaProducer[...]`
    // below still resolves to the imported class (objects only occupy the term
    // namespace), but consider renaming the object to avoid confusion.

    /**
     * Reads a local log file with Spark and publishes every line to a Kafka topic.
     *
     * Optional positional CLI overrides (defaults preserved for backward
     * compatibility with argument-less invocation):
     *   args(0) — input file path (Spark `textFile` URI)
     *   args(1) — Kafka bootstrap servers
     *   args(2) — target topic name
     */
    def main(args: Array[String]): Unit = {
//        Logger.getLogger("org").setLevel(Level.WARN)
        val conf: SparkConf = new SparkConf()
                // .init drops the trailing '$' from the object's class name
                .setAppName(this.getClass.getCanonicalName.init)
                .setMaster("local[*]")
        val sc: SparkContext = new SparkContext(conf)

        // Hard-coded defaults, overridable from the command line.
        val inputPath: String = if (args.length > 0) args(0) else "file:///F:/homework/code/spark-streaming-graphx/data/sample.log"
        val brokers: String   = if (args.length > 1) args(1) else "linux121:9092"
        val topic: String     = if (args.length > 2) args(2) else "topic-spark-1"

        try {
            val linesRDD: RDD[String] = sc.textFile(inputPath)

            // Kafka producer parameters. java.util.Properties is Serializable, so it
            // can be captured by the foreachPartition closure and shipped to executors.
            val prop: Properties = new Properties()
            prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
            prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
            prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])

            // One producer per partition: KafkaProducer itself is not serializable,
            // so it must be constructed on the executor, not the driver.
            linesRDD.foreachPartition { iter =>
                val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](prop)
                try {
                    iter.foreach { line =>
                        producer.send(new ProducerRecord[String, String](topic, line))
                    }
                } finally {
                    // close() flushes buffered records; the finally guarantees the
                    // producer is released even if a send fails mid-partition.
                    producer.close()
                }
            }
        } finally {
            // Always release Spark resources, even when the job fails.
            sc.stop()
        }
    }
}
