package test

import java.sql.DriverManager

import scala.util.control.NonFatal

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/**
 * Traffic-data batch job:
 *   1. loads a comma-separated capture log,
 *   2. filters captures above 90 km/h,
 *   3. ranks areas by capture count,
 *   4. counts captures per license-plate province and persists the result
 *      to MySQL, then reads it back for display.
 */
object kk {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      // Explicit name; without it Spark falls back to an auto-generated one.
      .appName("traffic-analysis")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext

    // Record layout (comma-separated), per the original data description:
    // checkpoint id, camera id, plate number, capture time, speed, road change, area id
    val trafficRDD: RDD[String] = sc.textFile("src/main/resources/traffic-data.txt")
    trafficRDD.take(5).foreach(println)
    println(trafficRDD.count())

    // 1) Captures whose speed (field 4) exceeds 90.
    val res1 = trafficRDD.filter { line =>
      val fields = line.split(",")
      fields(4).toDouble > 90
    }
    // NOTE: prints on executors; visible on the driver console only in local mode.
    res1.foreach(println)

    // 2) Top 5 areas (field 6) by number of captures.
    val res2 = trafficRDD
      .map { line =>
        val fields = line.split(",")
        (fields(6), 1)
      }
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
    res2.take(5).foreach(println)

    // 3) Captures per province — the plate prefix before the first '-'.
    val res3 = trafficRDD
      .map { line =>
        val fields = line.split(",")
        (fields(2).split("-")(0), 1)
      }
      .reduceByKey(_ + _)
    res3.foreach(println)

    // Homework: persist the per-province counts to MySQL, then query them back.
    val jdbcUrl = "jdbc:mysql://localhost:3306/traffic"
    val username = "root"
    val password = "123456"
    Class.forName("com.mysql.cj.jdbc.Driver")
    saveToMysql(res3, jdbcUrl, username, password)
    printFromMysql(jdbcUrl, username, password)

    spark.stop()
  }

  /**
   * Writes (province, count) pairs into the `traffic` table.
   *
   * One connection and ONE reusable PreparedStatement per partition (the
   * original created and closed a statement per record), with the inserts
   * batched via addBatch/executeBatch for a single round trip per partition.
   */
  private def saveToMysql(
      counts: RDD[(String, Int)],
      jdbcUrl: String,
      username: String,
      password: String): Unit = {
    counts.foreachPartition { partition =>
      var connection: java.sql.Connection = null
      try {
        connection = DriverManager.getConnection(jdbcUrl, username, password)
        val insert = connection.prepareStatement(
          "INSERT INTO traffic (province, flowCount) VALUES (?, ?)")
        try {
          partition.foreach { case (province, count) =>
            insert.setString(1, province)
            insert.setInt(2, count)
            insert.addBatch()
          }
          insert.executeBatch()
        } finally {
          insert.close()
        }
      } catch {
        // NonFatal so OOM/interrupts propagate instead of being swallowed.
        case NonFatal(e) => e.printStackTrace()
      } finally {
        if (connection != null) connection.close()
      }
    }
  }

  /** Reads every (province, flowCount) row from MySQL and prints it. */
  private def printFromMysql(jdbcUrl: String, username: String, password: String): Unit = {
    var connection: java.sql.Connection = null
    var statement: java.sql.Statement = null
    var resultSet: java.sql.ResultSet = null
    try {
      connection = DriverManager.getConnection(jdbcUrl, username, password)
      statement = connection.createStatement()
      resultSet = statement.executeQuery("SELECT province, flowCount FROM traffic")
      while (resultSet.next()) {
        val province = resultSet.getString("province")
        val flowCount = resultSet.getInt("flowCount")
        println(s"Province: $province, FlowCount: $flowCount")
      }
    } catch {
      case NonFatal(e) => e.printStackTrace()
    } finally {
      // Close in reverse acquisition order; each guard tolerates early failure.
      if (resultSet != null) resultSet.close()
      if (statement != null) statement.close()
      if (connection != null) connection.close()
    }
  }
}
