package com.ibm.cps.spark.streaming

import java.nio.charset.StandardCharsets
import java.util
import java.util.{Date, Locale, Properties}

import scala.util.Random

import kafka.javaapi.producer.Producer
import kafka.producer.KeyedMessage
import kafka.producer.ProducerConfig

/**
 * Created by telekinesis on 4/23/15.
 */
/**
 * Standalone generator that continuously publishes random CSV test records
 * ("int,double,double") to the Kafka topic TEST_SPARK_DATA, in batches, forever.
 *
 * Usage: KafkaTestDataWriter [count [intervalMs]]
 *   count      - number of messages per batch (default 10000)
 *   intervalMs - sleep between batches, in milliseconds (default 1000)
 *
 * NOTE(review): the broker address is hard-coded to 9.186.88.253:9092 — presumably
 * a fixed test environment; confirm before reusing elsewhere.
 */
object KafkaTestDataWriter {
  val defaultDataCount = 10000L
  val defaultInterval = 1000L

  def main(args: Array[String]): Unit = {
    val topic = "TEST_SPARK_DATA"
    val kafkaProperties = new Properties()
    kafkaProperties.put("metadata.broker.list", "9.186.88.253:9092")
    kafkaProperties.put("request.required.acks", "1")
    val producerConfig = new ProducerConfig(kafkaProperties)
    val kafkaProducer = new Producer[Int, Array[Byte]](producerConfig)
    // The send loop below never exits normally; release the producer's network
    // resources when the JVM is terminated (e.g. Ctrl-C).
    sys.addShutdownHook(kafkaProducer.close())

    val count = if (args.length >= 1) args(0).toLong else defaultDataCount
    val interval = if (args.length >= 2) args(1).toLong else defaultInterval
    val rand = new Random()
    while (true) {
      val messages = new util.LinkedList[KeyedMessage[Int, Array[Byte]]]
      for (_ <- 1L to count) {
        val value1 = rand.nextInt(1000)
        val value2 = rand.nextDouble() * 1000
        val value3 = rand.nextDouble() * 1000
        // formatLocal(Locale.US, ...) pins the decimal separator to '.' — the
        // default-locale format(...) would emit ',' on many JVMs and corrupt
        // the comma-separated record.
        val messageString = "%d,%f,%f".formatLocal(Locale.US, value1, value2, value3)
        // Explicit charset: bare getBytes depends on the platform default encoding.
        val newMessage =
          new KeyedMessage[Int, Array[Byte]](topic, messageString.getBytes(StandardCharsets.UTF_8))
        messages.add(newMessage)
      }
      kafkaProducer.send(messages)
      // Was println("all data sent", new Date()) — that prints a tuple.
      println(s"all data sent ${new Date()}")
      Thread.sleep(interval)
    }
  }
}
