package com.aura.spark.streaming

import java.util.Properties

import com.aura.config.Config
import com.aura.util.FileUtil
import org.apache.kafka.clients.producer.ProducerRecord
//import kafka.producer.{KeyedMessage, Producer, ProducerConfig}
import org.apache.kafka.clients.producer.{KafkaProducer}

object KafkaLogsProducer {

  /** Replays seven days of log files (aura20161201.log .. aura20161207.log)
    * into the configured Kafka topic, one record per line, throttled to
    * roughly ten messages per second to simulate a live stream.
    *
    * Topic and broker list come from [[com.aura.config.Config]].
    */
  def main(args: Array[String]): Unit = {

    val topic   = Config.topic
    val brokers = Config.brokerList

    val props = new Properties()
    // NOTE: "metadata.broker.list" was the legacy kafka.producer.Producer key.
    // The new org.apache.kafka.clients KafkaProducer requires
    // "bootstrap.servers" and refuses to start without it.
    props.put("bootstrap.servers", brokers)
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")

    val producer = new KafkaProducer[String, String](props)
    try {
      (1 to 7).map(i => s"spark-project/analysis/data/logs/aura2016120$i.log").foreach { path =>
        println(s"read log from $path and replay to kafka $topic")
        FileUtil.readFileAsLines(path).foreach { line =>
          producer.send(new ProducerRecord[String, String](topic, line))
          Thread.sleep(100) // throttle replay so downstream consumers see a steady trickle
        }
      }
    } finally {
      // close() flushes any buffered records and releases network resources,
      // even when the replay loop fails partway through.
      producer.close()
    }
  }

}
