package org.zhazhahei


  import org.apache.spark.{SparkConf, SparkContext}
  import org.apache.spark.sql.{SaveMode, SparkSession}
  import org.apache.spark.sql.functions._
  import org.apache.spark.sql.expressions.Window

  /**
   * Spark job: find the largest jump between adjacent wind speeds in the
   * US-accidents CSV, then export every row sharing that wind speed to a
   * MySQL table and a CSV directory.
   */
  object accidentcount_wind {
    def main(args: Array[String]): Unit = {

      // A SparkSession owns its SparkContext; building a separate
      // SparkContext first (as the original did) duplicates configuration
      // and risks conflicting settings. Configure everything on the builder.
      val spark = SparkSession.builder
        .master("local[*]")
        .appName("accidentcount_wind")
        .getOrCreate()

      val inputPath = "src\\main\\java\\org\\datas\\US_Accidents_March23.csv"
      val df = spark.read
        .option("header", "true")
        .csv(inputPath)

      // Cast the "Wind_Speed(mph)" column to double; unparseable or missing
      // values cast to null and are filtered out so they cannot enter the
      // window ordering below (they could never win anyway, since their Diff
      // would be null, but dropping them keeps the ordering well-defined).
      val dfWithWindSpeed = df
        .withColumn("WindSpeed", col("Wind_Speed(mph)").cast("double"))
        .filter(col("WindSpeed").isNotNull)

      // Sort by wind speed and compute each value's gap to its predecessor;
      // the row with the largest gap is the "most significant" wind speed.
      // NOTE(review): an un-partitioned Window collapses all rows into a
      // single task — acceptable for a local[*] run, not for a real cluster.
      //
      // BUG FIX: the original called .first(), which throws
      // NoSuchElementException on an empty DataFrame, so its `!= null`
      // guard could never fire. take(1).headOption is total.
      val maxDiffWindSpeedRow = dfWithWindSpeed
        .withColumn("WindSpeedDiff", lag("WindSpeed", 1) over Window.orderBy("WindSpeed"))
        .withColumn("Diff", when(col("WindSpeedDiff").isNotNull, col("WindSpeed") - col("WindSpeedDiff")))
        .orderBy(desc("Diff"))
        .take(1)
        .headOption

      maxDiffWindSpeedRow match {
        case Some(row) =>
          // All source rows whose wind speed equals the winning value.
          val significantWindSpeedRow = dfWithWindSpeed
            .filter(col("WindSpeed") === row.getAs[Double]("WindSpeed"))

          println("The most significant wind speed row is:")
          significantWindSpeedRow.show(false) // truncate = false: show full column values

          // Persist to MySQL. NOTE(review): credentials are hard-coded —
          // consider moving them to configuration / environment variables.
          val jdbcURL = "jdbc:mysql://localhost:3306/accident"
          val tableName = "wind_table"
          val connectionProperties = new java.util.Properties()
          connectionProperties.setProperty("user", "root")
          connectionProperties.setProperty("password", "011216")

          significantWindSpeedRow.write
            .mode(SaveMode.Overwrite)
            .jdbc(jdbcURL, tableName, connectionProperties)

          // Persist to CSV. BUG FIX: without an explicit SaveMode, Spark
          // throws AnalysisException ("path already exists") on any rerun;
          // Overwrite also matches the JDBC write above.
          val outputCSVPath = "src\\main\\java\\org\\USresult/wind"
          significantWindSpeedRow.write
            .mode(SaveMode.Overwrite)
            .option("header", "true")
            .csv(outputCSVPath)

        case None =>
          println("No significant wind speed found.")
      }

      spark.stop()
    }
  }

