package com.shujia.dx

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import java.util.Properties
import scala.io.{BufferedSource, Source}

/**
 * Demo: read the Dianxin sample data file line by line and publish each
 * line as a message to the Kafka topic "dianxin".
 *
 * Topic creation (run once on the cluster):
 *   kafka-topics.sh --zookeeper master:2181,node1:2181,node2:2181 \
 *     --replication-factor 3 --partitions 3 --topic dianxin --create
 */
object Demo01DataToKafka {
  def main(args: Array[String]): Unit = {

    // 1. Build the producer configuration
    val properties = new Properties()
    // Kafka broker list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")
    // Serializer classes for the message key and value
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val kafkaProducer: KafkaProducer[String, String] = new KafkaProducer[String, String](properties)

    // 2. Read the data file and send each line to the "dianxin" topic.
    //    try/finally guarantees both the file handle and the producer are
    //    released even if send() or the file read throws.
    val bs: BufferedSource = Source.fromFile("Flink/data/dianxin_data")
    try {
      bs.getLines().foreach { line =>
        val record: ProducerRecord[String, String] = new ProducerRecord[String, String]("dianxin", line)
        kafkaProducer.send(record)
      }
      // Flush ONCE after all records are queued. Flushing inside the loop
      // (as the original did) blocks on every message and defeats batching.
      kafkaProducer.flush()
    } finally {
      bs.close()
      // close() also flushes any remaining buffered records
      kafkaProducer.close()
    }
  }

}
