package org.zhazhahei


import org.apache.spark.api.java.function
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SaveMode, SparkSession, functions}
import org.apache.spark.sql.functions._

/**
 * Counts the occurrences of "Day" vs "Night" in the `Sunrise_Sunset` column of the
 * US Accidents CSV, then writes the two-row result to a MySQL table and a CSV file.
 *
 * Fixes over the previous revision:
 *  - A single `SparkSession` is built (with master) instead of also constructing a
 *    redundant `SparkContext`, which duplicated configuration.
 *  - Both counts are computed in ONE scan of the dataset via conditional aggregation
 *    (`count(when(...))`), instead of two separate `filter(...).count()` actions that
 *    each read the full CSV.
 *  - Paths use forward slashes consistently (valid on Windows and POSIX in the JVM);
 *    the output path previously mixed `\\` and `/` separators.
 */
object accidentcount_signal {
  def main(args: Array[String]): Unit = {

    // Single entry point; `local[*]` kept for parity with the original run mode.
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("accidentcount_signal")
      .getOrCreate()

    val inputPath = "src/main/java/org/datas/US_Accidents_March23.csv"
    val df = spark.read
      .option("header", "true")
      .csv(inputPath)

    // One pass over the data: count rows where Sunrise_Sunset is "Day" and where it
    // is "Night". `count(when(cond, true))` counts only non-null (i.e. matching) rows,
    // and yields 0 (not null) on an empty frame, matching the old two-count behavior.
    val countsRow = df.agg(
      count(when(col("Sunrise_Sunset") === "Day", true)).as("dayCount"),
      count(when(col("Sunrise_Sunset") === "Night", true)).as("nightCount")
    ).first()

    val dayCount = countsRow.getAs[Long]("dayCount")
    val nightCount = countsRow.getAs[Long]("nightCount")

    // Two fixed rows (Day first, then Night) with the same column names as before.
    val data = Seq(("Day", dayCount), ("Night", nightCount))
    val resultDF = spark.createDataFrame(data).toDF("Sunrise_Sunset", "Count")

    resultDF.show()

    // NOTE(review): credentials are hard-coded; they should be externalized
    // (e.g. spark-submit conf or environment variables) before production use.
    val jdbcURL = "jdbc:mysql://localhost:3306/accident"
    val tableName = "signal_table"
    val connectionProperties = new java.util.Properties()
    connectionProperties.setProperty("user", "root")
    connectionProperties.setProperty("password", "011216")

    resultDF.write
      .mode(SaveMode.Overwrite)
      .jdbc(jdbcURL, tableName, connectionProperties)

    // Spark writes a directory of part files at this path, not a single CSV file.
    val outputCSVPath = "src/main/java/org/USresult/signal/signal.csv"
    resultDF.write
      .mode(SaveMode.Overwrite)
      .option("header", "true")
      .csv(outputCSVPath)

    spark.stop()
  }
}
