package org.fickler

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions.{col, count, format_string, hour, substring, unix_timestamp, when}

/**
 * @author Fickler
 * @date 2024/1/7 17:11
 */
/**
 * Counts UK road accidents per one-hour time slot (00:00-01:00 ... 23:00-24:00)
 * from the DfT casualty-statistics CSV, then writes the result both to a CSV
 * directory and to a MySQL table.
 *
 * Input  : CSV with at least a "time" column formatted "HH:mm".
 * Output : one row per hour_range with its accident_count, ordered by hour_range
 *          (lexicographic order equals chronological order for zero-padded slots).
 */
object UkTimePeriod {

  def main(args: Array[String]): Unit = {

    // Build the session directly with the master URL instead of pre-creating a
    // SparkContext: the old code only worked because getOrCreate() silently
    // reused the already-running context created from a separate SparkConf.
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("UkTimePeriod")
      .getOrCreate()

    val inputPath = "src/main/java/org/datas/dft-road-casualty-statistics-accident-1979-2020.csv"
    // header=true so the "time" column can be addressed by name; all columns are strings.
    val df = spark.read.option("header", "true").csv(inputPath)

    // Extract the hour from the "HH:mm" time string, e.g. "17:23" -> 17.
    // Malformed or missing times cast to null and fall into "Unknown" below.
    val dfWithHour = df.withColumn("hour", substring(col("time"), 1, 2).cast("int"))

    // Label each row with its one-hour slot. A single format expression replaces
    // the former 24-branch when-chain: hour h maps to "%02d:00-%02d:00" % (h, h+1),
    // e.g. 9 -> "09:00-10:00". Hours outside 0-23 are invalid input and labelled
    // "Unknown" (the old chain incorrectly mapped an impossible hour 24 to
    // "23:00-24:00").
    val hourColumn = dfWithHour.withColumn("hour_range",
      when(col("hour").between(0, 23),
        format_string("%02d:00-%02d:00", col("hour"), col("hour") + 1))
        .otherwise("Unknown"))

    // Accident count per time slot; lexicographic ordering of the zero-padded
    // labels yields chronological order, with "Unknown" sorted last.
    val timeRangeFrequency = hourColumn.groupBy("hour_range")
      .agg(count("*").alias("accident_count"))
      .orderBy("hour_range")

    timeRangeFrequency.show()

    val outputPath = "src/main/java/org/UkResult/UkTimePeriod"
    timeRangeFrequency
      .coalesce(1) // collapse to a single partition so the output is one CSV file
      .write
      .mode("overwrite")
      .option("header", "true")
      .csv(outputPath)

    // NOTE(review): credentials are hard-coded in source; move to config or
    // environment variables before this leaves a local dev setup.
    val mysqlUrl = "jdbc:mysql://localhost:3306/ukaccident"
    val mysqlProperties = new java.util.Properties()
    mysqlProperties.setProperty("user", "root")
    mysqlProperties.setProperty("password", "011216")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J class name;
    // Connector/J 8+ prefers "com.mysql.cj.jdbc.Driver" (the old name still
    // forwards, with a warning). Confirm which connector version is on the classpath.
    mysqlProperties.setProperty("driver", "com.mysql.jdbc.Driver")

    timeRangeFrequency.write
      .mode(SaveMode.Overwrite)
      .jdbc(mysqlUrl, "UkTimePeriod", mysqlProperties)

    spark.stop()

  }

}
