package com.atguigu.pro

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.BufferedSource

/**
 * @description: Utility that reads a local CSV file line by line and publishes each line as a record to a Kafka topic.
 * @time: 2020/8/4 18:12
 * @author: baojinlong
 **/
object KafkaProducerUtils {

  /** Default input file: the sample UserBehavior CSV bundled with the project. */
  private val DefaultInputPath =
    "E:/qj_codes/big-data/FlinkTutorial/HotItemsAnalysis/src/main/resources/UserBehavior-short.csv"

  def main(args: Array[String]): Unit = {
    write2KafkaWithTopic2()
  }

  /**
   * Dry run: reads the input file line by line and prints each line to stdout
   * (no Kafka interaction).
   *
   * @param path file to read; defaults to the sample UserBehavior CSV
   */
  def write2KafkaWithTopic2(path: String = DefaultInputPath): Unit = {
    val bufferedSource: BufferedSource = io.Source.fromFile(path)
    try {
      bufferedSource.getLines().foreach(println)
    } finally {
      // Close the file handle even if printing fails (the original leaked it).
      bufferedSource.close()
    }
  }

  /**
   * Sends every line of the input file as a String-valued record (no key)
   * to the given Kafka topic on localhost:9092.
   *
   * @param topic destination Kafka topic
   * @param path  file to read; defaults to the sample UserBehavior CSV
   */
  def write2KafkaWithTopic(topic: String, path: String = DefaultInputPath): Unit = {
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "localhost:9092")
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    // Create a KafkaProducer used to send the records.
    val producer = new KafkaProducer[String, String](properties)
    // Read records from the file and send them one by one.
    val bufferedSource: BufferedSource = io.Source.fromFile(path)
    try {
      for (elem <- bufferedSource.getLines()) {
        producer.send(new ProducerRecord[String, String](topic, elem))
      }
    } finally {
      // Release the file handle and flush/close the producer even when reading
      // or sending throws (the original leaked the source and skipped close()).
      bufferedSource.close()
      producer.close()
    }
  }
}
