package homework1

import java.util.concurrent.Future

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord, RecordMetadata}
import org.apache.spark.{SparkConf, SparkContext}

object FileToKafka {

  /**
   * Spark driver that reads `data/sample.log` line by line and publishes each
   * line as a message to the Kafka topic "topic1".
   *
   * Each send is synchronous (`send(...).get()` blocks for the broker ack),
   * and a 1-second sleep follows every message, so throughput is intentionally
   * throttled to roughly one message per second — demo pacing, not production
   * behavior.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("readFile")
    val sc = new SparkContext(conf)
    sc.setLogLevel("warn")

    val topic1 = "topic1"
    val filePath = "data/sample.log"

    // Read the file and send each line to topic1.
    // Fix: acquire the producer ONCE per partition instead of once per record.
    // The original called ProducerUtils.getProducer inside the per-line loop,
    // which defeats the purpose of foreachPartition (amortizing expensive
    // per-partition setup) and, if getProducer constructs a new KafkaProducer
    // each call, would open a fresh broker connection for every single line.
    sc.textFile(filePath, 1).foreachPartition { partition =>
      // NOTE(review): assumes ProducerUtils.getProducer returns a producer
      // that is safe to reuse for all records of a partition — confirm.
      val producer = ProducerUtils.getProducer
      partition.foreach { line =>
        // Empty-string key preserved from the original; all records with the
        // same key hash to the same Kafka partition.
        val msg = new ProducerRecord[String, String](topic1, "", line)
        // Blocking send so the printed metadata reflects the actual ack.
        val result: RecordMetadata = producer.send(msg).get()
        println(s"topic:${result.topic()} partition:${result.partition()} offset:${result.offset()}")
        Thread.sleep(1000) // throttle to ~1 msg/sec (demo pacing)
      }
    }

    sc.stop()
  }

}
