package com.darrenchan.spark.rdd

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Word-count example that prints words sorted by frequency in descending order.
  *
  * Note: the original approach was to swap (word, count) => (count, word),
  * use sortByKey, then swap back, because sortByKey can only sort by key.
  * RDD.sortBy makes that double-swap unnecessary: it sorts by an arbitrary
  * key-extractor function directly.
  */
object OrderByKey {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("test")
    val sc = new SparkContext(sparkConf)

    // Classic word count: split lines into words, pair each with 1, sum per word.
    val countsRDD = sc.parallelize(List("hello spark", "hello world", "hello world")).
      flatMap(_.split(" ")).
      map((_, 1)).
      reduceByKey(_ + _)

    // Sort by the count (second tuple element), descending — no key/value swap needed.
    val resRDD = countsRDD.sortBy(_._2, ascending = false)

    println(resRDD.collect().mkString(" "))

    sc.stop()
  }
}
