package org.example

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types.{DataTypes, StructField, StructType}

import java.util.Properties

object data1_traffic {

  /**
   * Demo job: reads traffic-capture records three ways (RDD, DataFrame,
   * Dataset), runs three small analyses, and saves the last result to MySQL.
   *
   * Input line format (comma-separated):
   *   checkpoint ID, camera ID, plate number, capture time, speed, road ID, area ID
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("spark")
      .getOrCreate()
    val sc = spark.sparkContext

    try {
      val path = "src/main/resources/traffic-data.txt"

      // --- Read as RDD ---
      val rdd1 = sc.textFile(path)
      rdd1.take(5).foreach(println)
      println(rdd1.count())

      // Explicit table schema for the comma-separated records.
      val schema1 = StructType(Seq(
        StructField("jcID", DataTypes.StringType),
        StructField("jkID", DataTypes.StringType),
        StructField("carID", DataTypes.StringType),
        StructField("time", DataTypes.StringType),
        StructField("speed", DataTypes.StringType),
        StructField("luID", DataTypes.StringType),
        StructField("quID", DataTypes.StringType)))

      // --- Read as DataFrame / Dataset ---
      val df = spark.read.text(path)     // single "value" column per line
      val ds = spark.read.textFile(path) // Dataset[String]
      df.printSchema()
      df.show(3)
      ds.printSchema()
      ds.show(2)

      // Fix: schema1 was previously defined but never used. Reading the
      // comma-separated text as CSV with the explicit schema is how the txt
      // file is loaded as a proper table structure.
      val tableDF = spark.read.schema(schema1).csv(path)
      tableDF.printSchema()
      tableDF.show(3)

      // 1. All records with speed above 90.
      //    Malformed lines (too few fields, non-numeric speed) are skipped
      //    instead of failing the whole job on a single bad record.
      val res1 = rdd1.filter { line =>
        val fields = line.split(",")
        fields.length > 4 &&
          scala.util.Try(fields(4).toDouble).toOption.exists(_ > 90)
      }
      res1.take(3).foreach(println)
      println(res1.count())

      // 2. Traffic volume per area, sorted descending.
      val res2 = rdd1
        .map(_.split(","))
        .filter(_.length > 6) // guard against short/malformed lines
        .map(fields => (fields(6), 1))
        .reduceByKey(_ + _)
        .sortBy(_._2, ascending = false)
      res2.take(5).foreach(println)

      // 3. Traffic volume per license-plate province (prefix before "-").
      val res3 = rdd1
        .map(_.split(","))
        .filter(_.length > 2)
        .map(fields => (fields(2).split("-")(0), 1))
        .reduceByKey(_ + _)
      // Fix: collect() first so println runs on the driver — a bare
      // rdd.foreach(println) prints on the executors, not the console,
      // when running on a real cluster.
      res3.collect().foreach(println)

      // Persist the result as a table (province, carFlow) in MySQL.
      import spark.implicits._
      val resultDF = res3.toDF("province", "carFlow")
      val jdbcUrl = "jdbc:mysql://localhost:3306/your_database_name"
      val properties = new Properties()
      // NOTE(review): credentials are hard-coded for this demo; move them to
      // configuration/environment in real code.
      properties.setProperty("user", "root")
      properties.setProperty("password", "123456")
      // NOTE(review): com.mysql.jdbc.Driver is the legacy class name; with
      // Connector/J 8+ prefer "com.mysql.cj.jdbc.Driver" (the old name still
      // works there as a deprecated alias).
      properties.setProperty("driver", "com.mysql.jdbc.Driver")
      resultDF.write
        .mode("overwrite")
        .jdbc(jdbcUrl, "car", properties)
    } finally {
      // Fix: stop the SparkSession (which also stops the SparkContext) and
      // do it in a finally block so the session is released even on failure.
      spark.stop()
    }
  }
}
