package cn.lagou.test

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import java.util.Properties

object ReadLogData {
  /**
   * Reads a local sample log file with Spark, normalizes each line by collapsing
   * runs of whitespace into a `|` separator, prints the result, then publishes
   * each line to Kafka topic `topicB` (one message per second, record key = line index).
   */
  def main(args: Array[String]): Unit = {
    // Local-mode SparkContext; app name derived from the object's canonical name
    // (.init drops the trailing '$' that Scala objects carry).
    val conf = new SparkConf().setAppName(this.getClass.getCanonicalName.init).setMaster("local[*]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    // Collect is safe here: the sample file is small and lives on the driver's disk.
    val logLines: RDD[String] = sc.textFile("file:///F:\\lagou\\lagouhomework\\stage_4_module_3\\1.KafkaStream\\data\\sample.log")
    val normalized = logLines.map(_.replaceAll("\\s+", "\\|")).collect()
    normalized.foreach(println)

    val brokers = "linux121:9092,linux122:9092,linux123:9092"
    val topic = "topicB"
    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])

    val producer = new KafkaProducer[String, String](prop)
    try {
      // zipWithIndex replaces the mutable `var i` counter; the index becomes the record key.
      normalized.zipWithIndex.foreach { case (line, idx) =>
        producer.send(new ProducerRecord[String, String](topic, idx.toString, line))
        Thread.sleep(1000) // pace the stream: one message per second
      }
    } finally {
      // Always release resources, even if send/sleep throws.
      // close() also flushes any records still buffered in the producer.
      producer.close()
      sc.stop()
    }
  }
}
