package com.csw.spark

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD

object Demo13Sort {
  /**
    * Demonstrates sorting RDDs in two ways:
    *  - `sortBy`: sort by a derived key; ascending by default (ascending = true)
    *  - `sortByKey`: sort a pair RDD by its key
    *
    * Runs locally and prints the sorted pairs to the console.
    */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      // Fixed: app name previously said "mapValues", a leftover from another demo.
      .setAppName("sort")

    val sc: SparkContext = new SparkContext(conf)

    try {
      val aggRDD: RDD[(String, Int)] =
        sc.parallelize(List(("001", 29), ("002", 24), ("003", 25)))

      // Sort by the pair's value, descending (named boolean arg for clarity).
      val sortRDD: RDD[(String, Int)] = aggRDD.sortBy(_._2, ascending = false)

      // NOTE: foreach runs on executors; with master "local" the output goes to
      // this console, but on a real cluster it would land in executor logs.
      sortRDD.foreach(println)

      val rdd1: RDD[(Int, String)] =
        sc.parallelize(List((10, "csw"), (2, "csw1"), (16, "csw2")))

      // Sort by the Int key, descending.
      val rdd2: RDD[(Int, String)] = rdd1.sortByKey(ascending = false)

      rdd2.foreach(println)
    } finally {
      // Release the context even if a job fails (the original never stopped it).
      sc.stop()
    }
  }
}
