package vip.shuai7boy.trafficStreaming

import java.util.Properties

import net.sf.json.JSONObject
import net.sf.json.util.JSONBuilder
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.spark.{SparkConf, SparkContext}


/**
 * Produces simulated vehicle-camera events to Kafka.
 * Each event carries the camera ID, car plate ID, capture time, speed and road ID,
 * read from a local car-flow sample file via Spark.
 */
object KafkaDataProducer {
  def main(args: Array[String]): Unit = {
    val topic = "car_events"
    val props = new Properties()
    props.put("bootstrap.servers", "tuge1:9092")
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    val producer = new KafkaProducer[String, String](props)
    val conf = new SparkConf().setMaster("local[4]").setAppName("trafficStreamingObj")
    val sc = new SparkContext(conf)

    // Ensure the producer flushes buffered messages and the Spark context is
    // released even if the produce loop throws (e.g. on a malformed record).
    try {
      // Split each line exactly once, then filter on the resulting fields:
      // drop comment lines (";" prefix), records with the placeholder car id
      // "00000000", and records whose speed field is the 0 / 255 sentinel.
      val records: Array[Array[String]] = sc.textFile("./data/carFlow_all_column_test.txt")
        .filter(!_.startsWith(";"))
        .map(_.split(","))
        .filter(fields => !"00000000".equals(fields(2)))
        .filter { fields =>
          val speed = fields(6).toInt
          speed != 0 && speed != 255
        }
        .collect()

      // Replay the filtered records 1000 times, one JSON event every 200 ms,
      // to simulate a continuous camera feed.
      for (_ <- 1 to 1000; record <- records) {
        val event = new JSONObject()
        event.put("camera_id", record(0))
        event.put("car_id", record(2))
        event.put("event_time", record(4))
        event.put("speed", record(6))
        event.put("road_id", record(13))
        println(event.toString)
        producer.send(new ProducerRecord[String, String](topic, event.toString))
        Thread.sleep(200)
      }
    } finally {
      // close() flushes any messages still buffered by the async send() calls.
      producer.close()
      sc.stop()
    }
  }
}
