package util

import org.apache.kafka.clients.producer.{Callback, KafkaProducer, ProducerRecord, RecordMetadata}

import java.util.Properties
import scala.io.BufferedSource

object KafkaProducer {

  /** Demo entry point: replays every line of a local car-flow CSV file to the
    * Kafka topic "flink-test" 100 times, pacing one record per second.
    *
    * Record layout sent (tab-separated): monitorId, carId, send-time millis,
    * speed, speed.
    */
  def main(args: Array[String]): Unit = {

    val prop = new Properties()
    prop.setProperty("bootstrap.servers", "127.0.0.1:9092")
    prop.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    prop.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val producer: KafkaProducer[String, String] = new KafkaProducer(prop)

    // Materialize the file eagerly: the previous code kept an Iterator, which
    // was exhausted after the first pass of the outer loop, so passes 2..100
    // silently sent nothing. Close the source as soon as the lines are read
    // (it was previously leaked).
    val source: BufferedSource = io.Source.fromFile("/Users/sevenhong/code/flink_demo/data/carFlow_all_column_test.txt")
    val lines: List[String] =
      try source.getLines().toList
      finally source.close()

    try {
      for (i <- 1 to 100; line <- lines) {
        val splits: Array[String] = line.split(",")
        // Skip malformed rows instead of crashing with ArrayIndexOutOfBounds.
        if (splits.length > 6) {
          val monitorId: String = splits(0).replace("'", "")
          val carId: String = splits(2).replace("'", "")
          // Event time from the file (splits(4)) is deliberately replaced by
          // the current wall-clock time so downstream windows see fresh data.
          val timestamp: String = System.currentTimeMillis().toString
          val speed: String = splits(6)

          // NOTE(review): speed is appended twice, as in the original record
          // layout — confirm downstream consumers actually expect two copies.
          val payload: String = new StringBuilder()
            .append(monitorId).append("\t")
            .append(carId).append("\t")
            .append(timestamp).append("\t")
            .append(speed).append("\t")
            .append(speed)
            .toString()

          producer.send(new ProducerRecord[String, String]("flink-test", i + "", payload), new Callback {
            override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit =
              if (exception != null) {
                // Per the Kafka Callback contract, metadata is null on failure,
                // so the old unconditional metadata.offset() call would NPE
                // here; the failure itself was also silently swallowed.
                println("send failed: " + exception.getMessage)
              } else {
                println("ok...")
                println("offsets:" + metadata.offset())
              }
          })

          // Pace the replay at ~1 record/second (demo throttle, not coordination).
          Thread.sleep(1000)
        }
      }
    } finally {
      // Flush pending records and release client resources even if the loop throws.
      producer.close()
    }
  }

}
