package org.example.jasper
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions._


object BZsum {
  /**
   * Entry point: reads all accident CSV files from a local folder, counts
   * accidents per year (derived from the "data_inversa" column), prints the
   * aggregation, and overwrites the `BZsum` table in a local MySQL database.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Build a single SparkSession with the master set on the builder.
    // (Previously a separate SparkContext was created with master "local[*]"
    // and never used; the session builder silently reused it. The session
    // owns its own context, so the manual SparkContext is unnecessary.)
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("BZsum")
      .getOrCreate()

    try {
      // Folder containing the accident data sets (one or more CSV files).
      val dataFolderPath = "src/main/java/org/datas/bz"

      // Read every file in the folder; the first row of each file is a header.
      val allData = spark.read
        .option("header", "true")
        .csv(s"$dataFolderPath/*")

      // Parse "data_inversa" (day/month/year) into a timestamp so the year
      // can be extracted reliably; unparseable values become null.
      val dateFormat = "dd/MM/yyyy"
      val dfWithDate = allData.withColumn("data_inversa", to_timestamp(col("data_inversa"), dateFormat))

      // Count accidents per year, ordered chronologically.
      val result = dfWithDate
        .groupBy(year(col("data_inversa")).alias("year"))
        .agg(count("*").alias("accident_count"))
        .orderBy("year")

      // Print the aggregation to stdout as a quick sanity check.
      result.show()

      // Export the per-year counts to MySQL.
      // NOTE(review): credentials are hard-coded — move them to configuration
      // or environment variables before deploying.
      val mysqlUrl = "jdbc:mysql://localhost:3306/BZaccident"
      val mysqlProperties = new java.util.Properties()
      mysqlProperties.setProperty("user", "root")
      mysqlProperties.setProperty("password", "011216")
      // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J class
      // name; versions 8+ use "com.mysql.cj.jdbc.Driver" — confirm against
      // the driver actually on the classpath.
      mysqlProperties.setProperty("driver", "com.mysql.jdbc.Driver")

      result.write
        .mode(SaveMode.Overwrite) // replace any previous run's data
        .jdbc(mysqlUrl, "BZsum", mysqlProperties)
    } finally {
      // Release Spark resources even if the job fails partway through.
      spark.stop()
    }
  }
}
