package org.fickler

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions.{col, count, desc, max, month, struct, to_date, year}

/**
 * @author Fickler
 * @date 2024/1/7 18:09
 */
/**
 * Computes, for each year in the UK road-casualty dataset, the month with the
 * most recorded accidents, then writes the result to CSV and to MySQL.
 */
object UkYearMonth {

  def main(args: Array[String]): Unit = {

    // A SparkSession manages its own SparkContext, so building the session
    // with master/appName directly replaces the manual `new SparkContext`
    // (which only worked because getOrCreate() reused the existing context).
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("UkYearMonth")
      .getOrCreate()

    val inputPath = "src/main/java/org/datas/dft-road-casualty-statistics-accident-1979-2020.csv"
    val df = spark.read.option("header", "true").csv(inputPath)

    // Parse the `date` column (dd/MM/yyyy) and derive year/month.
    // to_date yields null on malformed input, which would otherwise create a
    // spurious (null, null) group below — drop those rows explicitly.
    val dfWithDate = df
      .withColumn("parsed_date", to_date(col("date"), "dd/MM/yyyy"))
      .filter(col("parsed_date").isNotNull)
      .withColumn("year", year(col("parsed_date")))
      .withColumn("month", month(col("parsed_date")))

    // Accident count per (year, month).
    val accidentFrequencyByYearMonth = dfWithDate
      .groupBy("year", "month")
      .agg(count("*").alias("accident_count"))
      .orderBy(desc("year"), desc("month"))

    // For each year, pick the month with the highest count. max over a struct
    // compares fields in order, so accident_count decides and month breaks ties
    // (later month wins on equal counts).
    val mostAccidentProneMonthByYear = accidentFrequencyByYearMonth
      .groupBy("year")
      .agg(max(struct(col("accident_count"), col("month"))).alias("max_accident"))
      .select(
        col("year"),
        col("max_accident.month").alias("month"),
        col("max_accident.accident_count").alias("accident_count"))
      // One row per year, so sorting by year alone fully determines the order;
      // the old secondary key desc("max_accident.month") referenced a struct
      // path that no longer exists as a column after the select.
      .orderBy(desc("year"))

    mostAccidentProneMonthByYear.show()

    // Write a single CSV file with header (coalesce(1) forces one part file).
    val outputPath = "src/main/java/org/UkResult/UkYearMonth"
    mostAccidentProneMonthByYear
      .coalesce(1)
      .write
      .mode(SaveMode.Overwrite)
      .option("header", "true")
      .csv(outputPath)

    // NOTE(review): credentials are hard-coded in source — move user/password
    // to configuration or environment variables before shipping.
    val mysqlUrl = "jdbc:mysql://localhost:3306/ukaccident"
    val mysqlProperties = new java.util.Properties()
    mysqlProperties.setProperty("user", "root")
    mysqlProperties.setProperty("password", "011216")
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
    // class name; Connector/J 8+ expects "com.mysql.cj.jdbc.Driver" — confirm
    // which connector version is on the classpath.
    mysqlProperties.setProperty("driver", "com.mysql.jdbc.Driver")

    // Mirror the result into MySQL, replacing the table on each run.
    mostAccidentProneMonthByYear.write
      .mode(SaveMode.Overwrite)
      .jdbc(mysqlUrl, "UkYearMonth", mysqlProperties)

    spark.stop()
  }

}
