package com.loong.kafka.scala.producer

import com.loong.kafka.java.producer.LogProducer

import scala.io.Source

/**
  * Demo driver that replays log data into a Kafka topic, either from a
  * file on disk or from a small fixed set of sample lines.
  *
  * Created by 梁浩峰 on 2016/9/10 15:44.
  */
object SyslogProducer {

  /** Default Kafka topic both senders write to. */
  private val DefaultTopic = "ws2"

  /**
    * Streams every line of the given file into Kafka, one message every
    * 500 ms, printing a running count alongside each line and the final
    * total at the end.
    *
    * The producer is created once for the whole file (the original built a
    * fresh one per line), and the file handle is always released.
    *
    * @param filePath path of the log file to replay
    * @param topic    destination Kafka topic (defaults to "ws2")
    */
  def readDataFile(filePath: String, topic: String = DefaultTopic): Unit = {
    val source = Source.fromFile(filePath)
    try {
      // One producer for the entire replay — hoisted out of the loop.
      val producer = LogKafkaProducer.apply
      var count = 0
      source.getLines().foreach { line =>
        Thread.sleep(500) // throttle to ~2 messages/second
        count += 1
        producer.send(topic, line)
        println(s"$count : $line")
      }
      println(count)
    } finally {
      source.close() // release the file handle even if a send fails
    }
  }

  /**
    * Endlessly sends one randomly chosen sample line to Kafka every five
    * seconds, echoing it to stdout. Never returns; intended for manual
    * smoke testing of the pipeline.
    */
  def sendWordsData2Kafka(): Unit = {
    val words = Array("Hello sky ", "word Submitted checkpoint of time", "scala org apache hadoop fs FilterFileSystem", "storm INFO BlockManager", "zookeeper INFO DAGScheduler", "spark whose tasks have all completed")
    val producer = new LogProducer()

    while (true) {
      Thread.sleep(5000)
      // Pick the message once instead of indexing the array twice.
      val message = words((math.random * words.length).toInt)
      println(message)
      producer.send(DefaultTopic, message)
    }
  }

  /**
    * Entry point. The data file may be supplied as the first command-line
    * argument; without one, the original hard-coded path is used.
    */
  def main(args: Array[String]): Unit = {
    val path = if (args.nonEmpty) args(0) else "D:/logdata/oldlog"
    readDataFile(path)
    // sendWordsData2Kafka()
  }

}
