package zk.learn.streaming

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer

import scala.io.{BufferedSource, Source}

object KafkaProducer {

  /**
   * Reads a sample log file line by line and publishes every line as a value-only
   * record to the Kafka topic `tp_spark_stream_input`.
   *
   * @param args optional; `args(0)` overrides the default input file path
   *             (defaults to "code/LagouBigData/data/sample.log" for backward compatibility)
   */
  def main(args: Array[String]): Unit = {
    // Kafka connection parameters
    val brokers = "linux121:9092,linux122:9092,linux123:9092"
    val tpInput = "tp_spark_stream_input"

    val prop = new Properties()
    prop.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
    prop.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
    prop.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])

    // KafkaProducer (the imported Kafka class; this object only shadows the term name)
    val producer = new KafkaProducer[String, String](prop)

    // Allow the caller to supply the input path; fall back to the original hard-coded default.
    val path = if (args.nonEmpty) args(0) else "code/LagouBigData/data/sample.log"

    // Read the log file and send each line; close both resources even if sending fails.
    val file: BufferedSource = Source.fromFile(path, "UTF-8")
    try {
      for (line <- file.getLines()) {
        // `line` is already a String — no conversion needed (removed redundant .toString)
        val msg = new ProducerRecord[String, String](tpInput, line)
        println(msg)
        producer.send(msg)
      }
    } finally {
      // BUG FIX: the BufferedSource was never closed before (file-handle leak),
      // and producer.close() was not guaranteed on failure. close() also flushes
      // any records still buffered by the producer.
      file.close()
      producer.close()
    }
  }
}