import java.text.SimpleDateFormat
import java.util.Calendar

import kafka.serializer.StringDecoder
import net.sf.json.JSONObject
import org.apache.spark.SparkConf
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka._

object CarEventCountAnalytics {

  /**
   * Streaming job: consumes car-event JSON records from Kafka, computes per-camera
   * speed totals over a sliding window, and writes the aggregates to Redis.
   *
   * Redis layout: hash key "&lt;yyyyMMdd&gt;_&lt;camera_id&gt;", field "&lt;HHmm&gt;",
   * value "&lt;total_speed&gt;_&lt;vehicle_count&gt;".
   *
   * @param args optional; args(0) overrides the Spark master URL
   *             (defaults to "local[2]" for local testing)
   */
  def main(args: Array[String]): Unit = {
    // Use the caller-supplied master URL if given; "local[2]" for local runs
    // (one thread for receiving, one for processing). In a real cluster this
    // would be e.g. node1:7077.
    val masterUrl = if (args.length > 0) args(0) else "local[2]"

    // StreamingContext is the entry point for Spark Streaming.
    val conf = new SparkConf().setMaster(masterUrl).setAppName("CarClickCountStat")
    val ssc = new StreamingContext(conf, Seconds(5)) // micro-batch every 5 s
//    ssc.checkpoint(".")  // enable if stateful operators requiring checkpointing are added

    // Kafka configuration.
    val topics = Set("car_events")
    val brokers = "node1:9092,node2:9092,node3:9092" // Kafka broker list

    val kafkaParams = Map[String, String](
      "metadata.broker.list" -> brokers, "serializer.class" -> "kafka.serializer.StringEncoder")

    // Redis database index the aggregates are stored in.
    val dbIndex = 1

    // Direct stream: a DStream is a sequence of RDDs, one per batch interval.
    val kafkaStream = KafkaUtils.createDirectStream[String, String, StringDecoder, StringDecoder](ssc, kafkaParams, topics)

    // Each Kafka record is a (key, value) pair; the value carries the JSON payload.
    val events = kafkaStream.flatMap(line => {
      val data = JSONObject.fromObject(line._2)
      println(data)
      Some(data)
    })

    // Map each event to (camera_id, (speed, 1)) so the windowed reduce
    // accumulates both the speed sum (_1) and the vehicle count (_2).
    val carSpeed = events.map(x => (x.getString("camera_id"), x.getInt("speed")))
      .mapValues((speed: Int) => (speed, 1))
      // Every 10 s, aggregate the previous 20 s of data per camera:
      // a._1 + b._1 sums the speeds, a._2 + b._2 counts the vehicles.
      .reduceByKeyAndWindow((a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2), Seconds(20), Seconds(10))

    // Persist each window's aggregates to Redis, partition by partition.
    carSpeed.foreachRDD(rdd => {
      rdd.foreachPartition(partitionOfRecords => {
        // One pooled Redis connection and one set of date formatters per
        // partition, not per record (SimpleDateFormat is not thread-safe, but a
        // partition iterator is consumed by a single thread, so this is safe).
        val jedis = RedisClient.pool.getResource
        val minuteFormat = new SimpleDateFormat("HHmm")
        val dayFormat = new SimpleDateFormat("yyyyMMdd")
        try {
          partitionOfRecords.foreach(pair => {
            // pair = (camera_id, (total_speed, total_count)) for the window
            val camera_id = pair._1
            val total = pair._2._1
            val count = pair._2._2
            // Processing is near-real-time, so "now" approximates the event time.
            val now = Calendar.getInstance().getTime()
            val time = minuteFormat.format(now)
            val day = dayFormat.format(now)
            if (count != 0) { // only store cameras that actually saw traffic
              jedis.select(dbIndex)
              jedis.hset(day + "_" + camera_id, time, total + "_" + count)
            }
          })
        } finally {
          // Always return the connection to the pool, even if a record fails;
          // otherwise the pool leaks a connection on every failed partition.
          RedisClient.pool.returnResource(jedis)
        }
      })
    })

    println("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx")

    ssc.start()            // start the streaming computation
    ssc.awaitTermination() // block until the job is stopped or fails
  }
}