import org.apache.spark
import org.apache.spark.sql.SparkSession
/** Reads an employee CSV, extracts (name, salary) pairs, and prints the
  * three highest-paid employees.
  *
  * The CSV path may be supplied as the first command-line argument; the
  * original hard-coded Windows path remains the default for backward
  * compatibility. Expected columns (0-based): 1 = name, 6 = salary.
  */
object Rdd {
  def main(args: Array[String]): Unit = {
    import scala.util.Try

    // Generalized input: CLI argument wins, otherwise the original default path.
    val inputPath = args.headOption.getOrElse("D:\\Employee_salary_first_half.csv")

    val spark = SparkSession.builder()
      .appName("RDDPartitionExample")
      .master("local[*]")
      .getOrCreate()

    try {
      val sc = spark.sparkContext

      val rdd = sc.textFile(inputPath)

      // Drop the header row before parsing data records.
      val header = rdd.first()
      val dataRdd = rdd.filter(_ != header)

      // Parse defensively without exceptions-as-control-flow: rows that are
      // too short (lift -> None) or have a non-numeric salary (Try -> None)
      // are skipped instead of failing the job.
      val extractedRdd = dataRdd.flatMap { line =>
        val parts = line.split(",")
        for {
          name   <- parts.lift(1)
          salary <- parts.lift(6).flatMap(s => Try(s.trim.toInt).toOption)
        } yield (name, salary)
      }

      // top(3) avoids the full distributed sort that sortBy(...).take(3)
      // performed; it keeps only the 3 largest elements per partition.
      val top3Employees = extractedRdd.top(3)(Ordering.by(_._2))

      top3Employees.foreach { case (name, salary) =>
        println(s"员工姓名: $name, 实际薪资: $salary")
      }
    } finally {
      // Always release the SparkSession, even if an action above throws.
      spark.stop()
    }
  }
}
