package com.shujia.flink.asyncio

import java.util.Properties

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import scala.io.Source

/**
 * Test-data generator: replays each line of `flink/data/score.txt` to the
 * Kafka topic "async" 1001 times, pausing 10 ms between records to simulate
 * a steady stream.
 */
object Demo2MakeData {
  def main(args: Array[String]): Unit = {

    val properties: Properties = new Properties

    // Broker connection configuration
    properties.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")
    // key/value serializer classes
    properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val producer: KafkaProducer[String, String] = new KafkaProducer[String, String](properties)

    // Read and parse the file once; its contents do not change between passes,
    // so re-reading it inside the loop (as before) was wasted I/O.
    val source = Source.fromFile("flink/data/score.txt")
    val lines: List[String] =
      try source.getLines().toList
      finally source.close()

    println("Generating data")
    try {
      // 0 to 1000 inclusive preserves the original 1001 passes over the file.
      for (_ <- 0 to 1000; line <- lines) {
        val record: ProducerRecord[String, String] = new ProducerRecord[String, String]("async", line)
        producer.send(record)
        // Throttle to roughly 100 records/second
        Thread.sleep(10)
      }
    } finally {
      // flush() pushes any batched records out before close(); without close()
      // the producer leaked its network resources and buffered sends could be lost.
      producer.flush()
      producer.close()
    }
  }
}
