package com.shujia.flink.dx

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.Source

/**
  * Demo producer: replays the local telecom data file into Kafka topic
  * "dianxin1", one line per record, throttled to ~10 records/second to
  * simulate a live stream for a downstream Flink consumer.
  */
object Demo1DataToKafka {
  def main(args: Array[String]): Unit = {

    /**
      * 1. Create the Kafka producer.
      */
    val properties = new Properties()

    // 1) Kafka broker list
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")

    // 2) Key/value serializer classes (both key and value are plain strings)
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val kafkaProducer = new KafkaProducer[String, String](properties)

    // Read the telecom data file. Keep a reference to the BufferedSource so
    // it can be closed — Source.fromFile(...).getLines() alone leaks the
    // underlying file handle.
    val source = Source.fromFile("data/dianxin_data")
    val data: List[String] =
      try source.getLines().toList
      finally source.close()

    // Ensure the producer is always closed, even if a send fails or the
    // thread is interrupted mid-replay.
    try {
      for (line <- data) {
        val record = new ProducerRecord[String, String]("dianxin1", line)

        kafkaProducer.send(record)
        // Flush per record so each line is delivered immediately — this demo
        // simulates a live stream, so batching latency is undesirable here.
        kafkaProducer.flush()

        // Throttle to ~10 records/second.
        Thread.sleep(100)
      }
    } finally {
      kafkaProducer.close()
    }
  }
}
