package vip.shuai7boy.trafficStreaming

import java.text.SimpleDateFormat
import java.util.Calendar

import net.sf.json.JSONObject
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream.{DStream, InputDStream}
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.{CanCommitOffsets, HasOffsetRanges, KafkaUtils, OffsetRange}
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import vip.shuai7boy.util.RedisClient

/**
 * Uses Spark Streaming to compute, in real time, the total vehicle speed and
 * vehicle count per checkpoint (camera) per minute, and writes the pair to
 * Redis as "totalSpeed_carCount".
 */
object AnalyticsCarCount {
  // Pipeline overview:
  //   1. Consume car events from the Kafka topic "car_events".
  //   2. Parse each message value as JSON; extract (camera_id, speed).
  //   3. Aggregate per camera over a 60-second tumbling window:
  //      total speed and vehicle count.
  //   4. Write "totalSpeed_carCount" to Redis, key = "yyyyMMdd_cameraId",
  //      hash field = "HHmm" (the minute the window was written).
  //   5. Commit Kafka offsets manually and asynchronously.

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("CarEventCountAnalytics")
    // Disable the cached Kafka consumer to avoid consumer-sharing problems when
    // multiple tasks on one executor read the same topic-partition.
    conf.set("spark.streaming.kafka.consumer.cache.enabled", "false")
    val ssc = new StreamingContext(conf, Seconds(5))

    val topics = Set("car_events")
    val brokers = "tuge1:9092"

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> brokers,
      "key.deserializer" -> classOf[StringDeserializer],
      "value.deserializer" -> classOf[StringDeserializer],
      "group.id" -> "predictGroup",
      "auto.offset.reset" -> "earliest",
      // Offsets are committed manually via commitAsync below (default is true).
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    // Direct stream from Kafka.
    val kafkaStream: InputDStream[ConsumerRecord[String, String]] =
      KafkaUtils.createDirectStream[String, String](
        ssc,
        PreferConsistent,
        Subscribe[String, String](topics, kafkaParams))

    // Parse each raw record value into a JSON object.
    val events: DStream[JSONObject] =
      kafkaStream.map(record => JSONObject.fromObject(record.value()))

    // (camera_id, (totalSpeed, carCount)) over a 60s tumbling window.
    val carSpeed: DStream[(String, (Int, Int))] = events
      .map(jd => (jd.getString("camera_id"), jd.getInt("speed")))
      .mapValues(speed => (speed, 1))
      .reduceByKeyAndWindow(
        (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2),
        Seconds(60), Seconds(60))

    // Persist to Redis.
    // Key: "<yyyyMMdd>_<camera_id>", hash field: "<HHmm>", value: "<totalSpeed>_<carCount>".
    val dbIndex = 1

    println("-------------------------消费中-------------------------------------")
    carSpeed.foreachRDD(rdd => {
      rdd.foreachPartition(partitionOfRecords => {
        val jedis = RedisClient.pool.getResource
        try {
          // SimpleDateFormat is not thread-safe, but a partition is processed by a
          // single thread, so creating the formatters once per partition is safe
          // and avoids re-allocating them for every record.
          val dayFormat = new SimpleDateFormat("yyyyMMdd")
          val minuteFormat = new SimpleDateFormat("HHmm")
          partitionOfRecords.foreach { case (cameraId, (speedTotal, carCount)) =>
            val now = Calendar.getInstance().getTime
            val day = dayFormat.format(now) // e.g. 20190514
            val time = minuteFormat.format(now) // e.g. 1735
            // Skip empty results; also skips windows where every speed was 0.
            if (carCount != 0 && speedTotal != 0) {
              jedis.select(dbIndex)
              // e.g. key 20190514_camera_id01, field 1735, value "200_5"
              println(day + "_" + cameraId, time, speedTotal + "_" + carCount)
              jedis.hset(day + "_" + cameraId, time, speedTotal + "_" + carCount)
            }
          }
        } finally {
          // Always return the connection to the pool, even when a write throws;
          // otherwise the pool leaks one connection per failed partition.
          RedisClient.pool.returnResource(jedis)
        }
      })
    })

    /**
     * Manual, asynchronous offset maintenance.
     * NOTE(review): offsets advance every 5s batch while results are only written
     * once per 60s window; a crash between a commit and the window output can
     * lose up to one window of aggregated data — confirm this is acceptable.
     */
    kafkaStream.foreachRDD(rdd => {
      val offsetRanges = rdd.asInstanceOf[HasOffsetRanges].offsetRanges
      kafkaStream.asInstanceOf[CanCommitOffsets].commitAsync(offsetRanges)
    })

    ssc.start()
    ssc.awaitTermination()
  }
}
