package org.zhazhahei

  import org.apache.spark.{SparkConf, SparkContext}
  import org.apache.spark.sql.{SaveMode, SparkSession}
  import org.apache.spark.sql.functions._

  object accidentcount_direction {

    /** Counts US accidents north vs. south of latitude 37 (column `Start_Lat`)
      * and writes the aggregated counts to a MySQL table and a single CSV file.
      */
    def main(args: Array[String]): Unit = {

      // One SparkSession is sufficient; the previous version also built a
      // SparkConf/SparkContext that was never used (getOrCreate reuses or
      // creates the underlying context itself).
      val spark = SparkSession.builder
        .master("local[*]")
        .appName("accidentcount_direction")
        .getOrCreate()

      try {
        // Forward slashes work on every platform (including Windows) and
        // avoid double-backslash escaping.
        val inputPath = "src/main/java/org/datas/US_Accidents_March23.csv"
        val df = spark.read
          .option("header", "true")
          .csv(inputPath)

        // Cast "Start_Lat" to double; values that fail to parse become null.
        val dfWithLatitude =
          df.withColumn("Start_Lat", col("Start_Lat").cast("double"))

        // Classify by latitude: > 37 is "North", otherwise "South".
        // Bug fix: rows with a null latitude (missing/unparsable) previously
        // fell silently into "South"; label them "Unknown" so the counts are
        // not skewed.
        val dfWithDirection = dfWithLatitude.withColumn(
          "Direction",
          when(col("Start_Lat").isNull, "Unknown")
            .when(col("Start_Lat") > 37, "North")
            .otherwise("South")
        )

        // Number of accidents per direction.
        val result = dfWithDirection
          .groupBy("Direction")
          .agg(count("*").alias("accident_count"))

        result.show()

        // MySQL connection details.
        // NOTE(review): a database password should not live in source control;
        // it is now read from the environment, keeping the old literal as a
        // backward-compatible fallback.
        val jdbcURL = "jdbc:mysql://localhost:3306/accident"
        val tableName = "direction_table"
        val connectionProperties = new java.util.Properties()
        connectionProperties.setProperty("user", "root")
        connectionProperties.setProperty(
          "password",
          sys.env.getOrElse("MYSQL_PASSWORD", "011216")
        )

        result.write
          .mode(SaveMode.Overwrite)
          .jdbc(jdbcURL, tableName, connectionProperties)

        // coalesce(1) so the CSV output lands in a single part file.
        val outputPath = "src/main/java/org/USresult/direction"
        result.coalesce(1).write.mode(SaveMode.Overwrite).csv(outputPath)
      } finally {
        // Always release Spark resources, even if a stage above failed.
        spark.stop()
      }
    }
  }

