package org.example
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
object spark_Data_traffic {

  /** Entry point: reads comma-separated traffic records from a local text file
    * and runs three small analyses, printing each result to stdout:
    *   1. all records whose speed (field 4) exceeds 90;
    *   2. the five most frequent query IDs (field 6);
    *   3. record counts per province, taken from the car-ID prefix (field 2,
    *      formatted "PROVINCE-...").
    * Column meanings are inferred from the index usage below — confirm against
    * the actual data file.
    *
    * NOTE(review): malformed lines (fewer than 7 fields, non-numeric speed)
    * will still throw at runtime, same as the original — add validation if the
    * input is not guaranteed clean.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      // An application name is mandatory: without it SparkContext fails at
      // startup with "An application name must be set in your configuration".
      .appName("spark_Data_traffic")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext

    // Cached because four independent actions below consume this RDD;
    // without cache() each action re-reads and re-splits the whole file.
    val trafficRDD: RDD[String] = sc
      .textFile("src/main/resources/traffic-data.txt")
      .cache()

    trafficRDD.take(5).foreach(println)
    println(trafficRDD.count())

    // Analysis 1: records with speed (field 4) above 90.
    val highSpeed = trafficRDD.filter { line =>
      val fields = line.split(",")
      fields(4).toDouble > 90
    }
    // collect() first so the output is printed on the driver; a bare
    // RDD.foreach(println) runs on the executors and its output is lost
    // in cluster mode (and interleaved even locally).
    highSpeed.collect().foreach(println)

    // Analysis 2: top-5 query IDs (field 6) by record count.
    val topQueries = trafficRDD
      .map(line => (line.split(",")(6), 1))
      .reduceByKey(_ + _)
      .sortBy(_._2, ascending = false)
    topQueries.take(5).foreach(println)

    // Analysis 3: record count per province (prefix of the car ID, field 2).
    val byProvince = trafficRDD
      .map { line =>
        val province = line.split(",")(2).split("-")(0)
        (province, 1)
      }
      .reduceByKey(_ + _)
    byProvince.collect().foreach(println)

    // spark.stop() also stops the underlying SparkContext.
    spark.stop()
  }

}
