package homework1

import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}

object ReaderDStream {

  /** Reads `data/sample.log` with Spark and publishes every line to the Kafka
    * topic "topic1". The record key is the last comma-separated field of the
    * line with the `<<<!>>>` sentinel stripped; the record value is the whole
    * line.
    *
    * Note: despite the object name, this is a plain RDD batch job — no DStream
    * is created.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("KafkaDStream").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val lines = sc.textFile("data/sample.log")

      // Pair each line with its routing key: the last comma-separated field,
      // minus the "<<<!>>>" sentinel. (split never returns an empty array, so
      // `last` is safe even for empty lines.)
      val keyedLines = lines.map { line =>
        val fields = line.split(",")
        (fields.last.replace("<<<!>>>", ""), line)
      }

      // Create the producer on the executor, once per partition, instead of
      // once on the driver: Kafka producers are not serializable, so capturing
      // a driver-side instance in a `foreach` closure fails with
      // "Task not serializable" at runtime.
      keyedLines.foreachPartition { partition =>
        // NOTE(review): instance() presumably returns a shared/singleton
        // producer per JVM, so it is deliberately not closed here — confirm.
        val producer = KafkaProducer.instance()
        partition.foreach { case (key, value) =>
          producer.send(new ProducerRecord[String, String]("topic1", key, value))
        }
      }
    } finally {
      // Always release the Spark context, even if the job fails.
      sc.stop()
    }
  }

}
