package com.fwmagic.spark.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demo of sorting a pair RDD by key. Both `sortByKey` and `sortBy` take an
  * `ascending` flag (default `true`); here we sort in descending order.
  *
  * NOTE(review): a common misconception — `sortByKey` DOES trigger a shuffle,
  * just like `sortBy` (both range-partition the data before sorting within
  * partitions); see the Spark RDD programming guide's shuffle section.
  */
object SortByKeyDemo {
    def main(args: Array[String]): Unit = {
        // Local-mode Spark context, app named after this class.
        val conf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")
        val sc: SparkContext = new SparkContext(conf)

        val pairs = sc.parallelize(Array((3, 4), (1, 2), (4, 4), (2, 5), (6, 5), (5, 6)))

        // Equivalent alternative on a pair RDD: pairs.sortByKey(false)
        // Sort descending by the first tuple element.
        val sorted: RDD[(Int, Int)] = pairs.sortBy(pair => pair._1, ascending = false)

        sorted.collect.foreach(println)

        sc.stop()
    }
}
