package com.shujia.streaming

import com.alibaba.fastjson.{JSON, JSONObject}
import com.alibaba.fastjson.serializer.JSONObjectCodec
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}

import java.lang
import java.sql.{Connection, DriverManager, PreparedStatement}
import java.text.SimpleDateFormat
import java.util.Date

object Demo9CardCondition {
  /**
   * Spark Streaming job: monitor per-checkpoint ("card") traffic conditions.
   *
   * Reads a socket stream of JSON vehicle records, e.g.
   * {"car":"皖A9A7N2","city_code":"340500","county_code":"340522","card":117988031603010,
   *  "camera_id":"00001","orientation":"西南","road_id":34052055,"time":1614711895,"speed":36.38}
   * and, over a 15-second window sliding every 5 seconds, computes the average
   * speed and vehicle count per card, writing the results to MySQL.
   */
  def main(args: Array[String]): Unit = {
    val sparkSession: SparkSession = SparkSession.builder()
      .master("local[2]")
      .appName("提交命令执行")
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    val sparkContext: SparkContext = sparkSession.sparkContext
    // 5-second micro-batch interval.
    val sparkStreaming: StreamingContext = new StreamingContext(sparkContext, Durations.seconds(5))

    // Raw JSON lines from the socket source.
    val infoDS: ReceiverInputDStream[String] = sparkStreaming.socketTextStream("master", 12345)

    // Option 1: attach the window at the source.
    // Statistics over the last 15 s, recomputed every 5 s.
    val carJsonDS: DStream[String] = infoDS.window(Durations.seconds(15), Durations.seconds(5))

    /**
     * 1. Parse each JSON line with fastjson; keep only the card id and speed.
     *    In fastjson, unquoted JSON integers are typically read as Long and
     *    decimals as Double.
     */
    val cardAndSpeedDS: DStream[(Long, (Double, Int))] = carJsonDS.map((line: String) => {
      val jsonObject: JSONObject = JSON.parseObject(line)
      val cardId: Long = jsonObject.getLong("card")
      val carSpeed: Double = jsonObject.getDouble("speed")
      (cardId, (carSpeed, 1)) // each record counts as one distinct vehicle
    })

    /**
     * 2. Per card: average speed and vehicle count.
     *
     * BUG FIX: the original computed the average *inside* reduceByKey. The
     * reduce function must be associative — it is applied pairwise to partial
     * results — but once an average replaces the running sum, later merges add
     * an average to raw speeds and divide again, yielding wrong values for any
     * key with more than two records (and across partitions). Correct pattern:
     * reduce to (sumSpeed, count), then derive the average with mapValues.
     *
     * (Option 2 would be reduceByKeyAndWindow instead of windowing the source.)
     */
    val cardConditionDS: DStream[(Long, (Double, Int))] = cardAndSpeedDS
      .reduceByKey((kv1: (Double, Int), kv2: (Double, Int)) => {
        // (sum of speeds, number of cars) — stays associative.
        (kv1._1 + kv2._1, kv1._2 + kv2._2)
      })
      .mapValues { case (sumSpeed, carNumber) =>
        (sumSpeed / carNumber, carNumber) // (average speed, car count)
      }

    /**
     * 3. Persist each micro-batch to MySQL — one connection per partition,
     *    rows written as a single JDBC batch. try/finally guarantees the
     *    statement and connection are released even if a record fails.
     */
    cardConditionDS.foreachRDD((rdd: RDD[(Long, (Double, Int))]) => {
      rdd.foreachPartition((itr: Iterator[(Long, (Double, Int))]) => {
        println("------------数加 防伪码-------------------")
        // 1. Register the JDBC driver (legacy driver class name, per cluster setup).
        Class.forName("com.mysql.jdbc.Driver")
        // 2. Open the connection for this partition.
        val conn: Connection = DriverManager.getConnection(
          "jdbc:mysql://master:3306/bigdata29?useUnicode=true&characterEncoding=utf-8&useSSL=false",
          "root",
          "123456"
        )
        try {
          val ps: PreparedStatement = conn.prepareStatement("insert into card_condition values(?,?,?,?)")
          try {
            // Processing timestamp shared by every row of this partition/batch.
            val batchTime: String = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())

            itr.foreach { case (cardId, (avgSpeed, carNumber)) =>
              ps.setLong(1, cardId)
              ps.setDouble(2, avgSpeed)
              ps.setInt(3, carNumber)
              ps.setString(4, batchTime)
              ps.addBatch() // batch instead of one network round-trip per row
            }
            ps.executeBatch()
          } finally {
            ps.close() // always release the statement, even on failure
          }
        } finally {
          conn.close() // always release the connection, even on failure
        }
        println()
      })
    })

    sparkStreaming.start()
    sparkStreaming.awaitTermination()
    sparkStreaming.stop()
  }
}
