package com.shujia.core

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo12SortBy {
  def main(args: Array[String]): Unit = {
    /**
     * sortBy: a transformation that sorts the records of an RDD by a key
     * extracted from each element.
     */
    val conf: SparkConf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("Demo12SortBy")

    val sc: SparkContext = new SparkContext(conf)

    // Parse each CSV line into a Stu record: id,name,age,gender,clazz
    val stuRDD: RDD[Stu] = sc
      .textFile("spark/data/stu/students.txt")
      .map(line => {
        val splits: Array[String] = line.split(",")
        Stu(splits(0), splits(1), splits(2).toInt, splits(3), splits(4))
      })

    // Two actions consume this RDD below; cache it so the file is read
    // and parsed only once instead of being recomputed per action.
    stuRDD.cache()

    // Sort by class, descending.
    stuRDD.sortBy(_.clazz, ascending = false).foreach(println)

    // Sort by class descending, then by age ascending.
    // A tuple key with ascending = false sorts clazz descending, and
    // negating the age makes "-age descending" equal "age ascending".
    // This replaces the fragile string-concatenation key
    // (clazz + (1000 - age)), which compared ages lexicographically and
    // only worked while every (1000 - age) had the same digit width.
    stuRDD.sortBy(stu => (stu.clazz, -stu.age), ascending = false).foreach(println)

    // Release the cached partitions and shut the SparkContext down cleanly.
    stuRDD.unpersist()
    sc.stop()
  }

  /** One student record parsed from students.txt. */
  case class Stu(id: String, name: String, age: Int, gender: String, clazz: String)

}
