package com.bus.batch

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Offline (batch) analysis of bus passenger records.
 *
 * Input: a pipe-separated text file where each line looks like
 * "busId|passengerCount|timestamp|stationId|..." with a timestamp shaped
 * like "yyyy-MM-dd HH:mm:ss" (the hour of day is read from characters 11-13).
 * Each job aggregates averages and overwrites a result table in MySQL.
 */
object BusBatchAnalysis {

  // --- JDBC result sink -----------------------------------------------------
  // SECURITY NOTE(review): host and credentials are hard-coded in source; move
  // them to external configuration / a secrets store before deploying.
  private val JdbcUrl =
    "jdbc:mysql://106.13.32.254:3306/febs_st_bus?serverTimezone=UTC&characterEncoding=utf8&useUnicode=true&useSSL=false"
  private val JdbcUser = "st_bus"
  private val JdbcPassword = "123456"

  /** Overwrites MySQL table `table` with the contents of `df`. */
  private def writeToMysql(df: DataFrame, table: String): Unit =
    df.write
      .format("jdbc")
      .mode(SaveMode.Overwrite)
      .option("driver", "com.mysql.cj.jdbc.Driver")
      .option("url", JdbcUrl)
      .option("dbtable", table)
      .option("user", JdbcUser)
      .option("password", JdbcPassword)
      .save()

  /** True for morning (07-09) or evening (17-19) rush-hour records, inclusive. */
  private def isRushHour(hour: Int): Boolean =
    (hour >= 7 && hour <= 9) || (hour >= 17 && hour <= 19)

  /** Extracts the hour of day from a "yyyy-MM-dd HH:mm:ss" timestamp string. */
  private def hourOf(time: String): Int = time.substring(11, 13).toInt

  /**
   * Entry point: builds a local SparkSession, loads the raw records and runs
   * the selected analysis job(s).
   *
   * @param args optional; args(0) overrides the default input file path
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("BusBatchAnalysis")
      .master("local[2]")
      .getOrCreate()

    val sc = spark.sparkContext

    // First program argument overrides the hard-coded development path.
    val inputPath = args.headOption.getOrElse(
      "C:\\Users\\吉日木图\\Desktop\\公交车系统\\data\\bus.txt")

    val inputRDD = sc.textFile(inputPath)

    // Only one job is enabled at a time; uncomment to run the others.
    //getTopNBus(spark, inputRDD)
    //getTopNStation(spark, inputRDD)
    getBusNum(spark, inputRDD)

    spark.stop()
  }

  /**
   * Top-5 buses by average passenger count during rush hours; result is
   * written to MySQL table `t_bus_topN`.
   */
  def getTopNBus(spark: SparkSession, inputRDD: RDD[String]): Unit = {
    import spark.implicits._

    // Parse each record once into (hour, bus, rawCount). The original code
    // re-encoded fields as a "bus|num" string only to split it again later.
    val rushHourDF = inputRDD
      .map { line =>
        val item = line.split("\\|", -1)
        (hourOf(item(2)), item(0), item(1))
      }
      .filter { case (hour, _, _) => isRushHour(hour) }
      .map { case (_, bus, num) => BusNum(bus, num.toInt) }
      .toDF()

    val topBusesDF = rushHourDF
      .groupBy("bus")
      .avg("num")
      .withColumnRenamed("avg(num)", "num")
      .orderBy($"num".desc)
      .limit(5)

    writeToMysql(topBusesDF, "t_bus_topN")
  }

  /**
   * Top-5 stations by average passenger count during rush hours; result is
   * written to MySQL table `t_station_topN`.
   */
  def getTopNStation(spark: SparkSession, inputRDD: RDD[String]): Unit = {
    import spark.implicits._

    // Same pipeline as getTopNBus but keyed on the station field (item 3).
    val rushHourDF = inputRDD
      .map { line =>
        val item = line.split("\\|", -1)
        (hourOf(item(2)), item(3), item(1))
      }
      .filter { case (hour, _, _) => isRushHour(hour) }
      .map { case (_, station, num) => StationNum(station, num.toInt) }
      .toDF()

    val topStationsDF = rushHourDF
      .groupBy("station")
      .avg("num")
      .withColumnRenamed("avg(num)", "num")
      .orderBy($"num".desc)
      .limit(5)

    writeToMysql(topStationsDF, "t_station_topN")
  }

  /**
   * Average passenger count per bus per hour of day, over all records (no
   * rush-hour filter); result is written to MySQL table `t_bus_num`.
   */
  def getBusNum(spark: SparkSession, inputRDD: RDD[String]): Unit = {
    import spark.implicits._

    val hourlyDF = inputRDD
      .map { line =>
        val item = line.split("\\|", -1)
        BusHourNum(item(0), item(1).toInt, hourOf(item(2)))
      }
      .toDF()

    val resultDF = hourlyDF
      .groupBy("bus", "hour")
      .avg("num")
      .withColumnRenamed("avg(num)", "num")

    writeToMysql(resultDF, "t_bus_num")
  }

  /** A rush-hour bus observation: bus id and passenger count. */
  final case class BusNum(bus: String, num: Int)

  /** A rush-hour station observation: station id and passenger count. */
  final case class StationNum(station: String, num: Int)

  /** A per-record (bus id, passenger count, hour-of-day) observation. */
  final case class BusHourNum(bus: String, num: Int, hour: Int)

}
