package org.zjt.spark.traffic

import java.util.Properties

import com.alibaba.fastjson.JSONObject
import kafka.producer.{KeyedMessage, Producer, ProducerConfig}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * DESC    
  *
  **/
/**
  * Replays pre-captured road-traffic records from a local text file into the
  * Kafka topic "test" as JSON messages, throttled to one record every 100 ms.
  *
  * Input format: comma-separated rows; lines starting with ";" are comments.
  * Column layout (0-based, values wrapped in single quotes in the file):
  * 0 = camera id, 2 = car id, 4 = event time, 6 = speed, 13 = road id.
  *
  * Uses an explicit `main` instead of the `App` trait to avoid the
  * DelayedInit initialization-order pitfalls for non-trivial entry points.
  */
object SendTrafficMeg {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("SendTrafficMeg").setMaster("local[2]")
    val sc = new SparkContext(conf)

    // All behaviour rows: drop comment lines, split each line on commas.
    // Rows with fewer than 14 columns are skipped so record(13) below
    // cannot throw ArrayIndexOutOfBoundsException on malformed input.
    val lines = sc.textFile("./2014082013_all_column_test.txt")
      .filter(!_.startsWith(";"))
      .map(_.split(","))
      .filter(_.length >= 14)
      .collect()
    println("查询到的行数目%s ".format(lines.length))

    // Initialise the (legacy 0.8-style) Kafka producer connection.
    val properties = new Properties()
    properties.put("metadata.broker.list", "centos:9092")
    properties.put("serializer.class", "kafka.serializer.StringEncoder")
    properties.put("producer.type", "async")
    val config = new ProducerConfig(properties)
    val producer = new Producer[String, String](config)

    try {
      lines.foreach { record =>
        // Strip the single quotes around each field and publish as JSON.
        val data = new JSONObject()
        data.put("camera_id", record(0).replace("'", ""))
        data.put("car_id", record(2).replace("'", ""))
        data.put("event_time", record(4).replace("'", ""))
        data.put("speed", record(6).replace("'", ""))
        data.put("road_id", record(13).replace("'", ""))

        val mesg = new KeyedMessage[String, String]("test", data.toJSONString)
        producer.send(mesg)
        println(s"成功发送meg：$data")
        Thread.sleep(100) // throttle to roughly 10 messages per second
      }
    } finally {
      // Release resources even when a send or parse fails part-way through;
      // the original leaked both the producer and the SparkContext on error.
      producer.close()
      sc.stop()
    }
  }

}
