package chapter03

import org.apache.spark.{SparkConf, SparkContext}

object Test17_maxSalary {
  /**
   * Reads an employee-salary CSV and, per department:
   *  1. prints the highest-paid employee as (department, List((name, salary, department))), and
   *  2. saves all employees earning more than 50000, sorted by salary descending,
   *     to "input/salary" as text files.
   *
   * CSV column layout (0-based): 1 = employee name, 5 = salary, 9 = department name.
   * NOTE(review): f(5).toInt will throw on malformed/non-numeric rows — assumes clean input.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("MaxSalary")
    val sc = new SparkContext(conf)
    val lines = sc.textFile("input/Employee_salary_first_half.csv")

    // Drop the CSV header: only the very first line of partition 0 is the header.
    val records = lines.mapPartitionsWithIndex((index, iter) =>
      if (index == 0) iter.drop(1) else iter
    )

    // Parse each row once into (name, salary, department) and cache it,
    // since both queries below reuse the same projection (the original
    // re-parsed the file for each query).
    val parsed = records
      .map(_.split(","))
      .map(f => (f(1), f(5).toInt, f(9)))
      .cache()

    // Query 1: highest-paid employee per department.
    // take(1) after an ascending sort + reverse keeps the original output
    // shape (a one-element List) and tie-breaking order.
    println(parsed
      .groupBy(_._3)
      .map { case (dept, emps) =>
        (dept, emps.toList.sortBy(_._2).reverse.take(1))
      }
      .collect().mkString("Array(", ", ", ")"))

    // Query 2: per department, employees with salary > 50000, sorted
    // descending by salary, written out as text files.
    // FIX: saveAsTextFile returns Unit — the original wrapped this whole
    // action in println(...), which only printed "()".
    parsed
      .groupBy(_._3)
      .map { case (dept, emps) =>
        (dept, emps.toList.filter(_._2 > 50000).sortBy(_._2).reverse)
      }
      .saveAsTextFile("input/salary")

    // FIX: release the SparkContext (and its UI/resources) before exiting.
    sc.stop()
  }
}
