package morefun.demo

import org.apache.spark.{SparkConf, SparkContext}

object SortWordCount {

  /** Counts word frequencies in a text file and prints the words sorted by
    * frequency in descending order, using a local-mode Spark job.
    *
    * Output stages (each printed via `foreach(println)`):
    *   1. raw (word, count) pairs
    *   2. swapped (count, word) pairs sorted by count descending
    *   3. the sorted pairs swapped back to (word, count)
    *
    * NOTE(review): `foreach(println)` prints on the executors; that is only
    * visible on the driver console because the master is `local` — confirm
    * before reusing on a cluster.
    *
    * @param args optional; args(0) overrides the default input file path
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName("LocalFile").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Allow the input path to be supplied on the command line; fall back
      // to the original hard-coded demo file for backward compatibility.
      val inputPath = if (args.nonEmpty) args(0) else "file:/Users/wdy/tmp/spark.txt"

      val lines = sc.textFile(inputPath)

      // First, count occurrences of each word.
      val wordCount = lines.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
      wordCount.foreach(println(_))

      // Swap to (count, word) so the count becomes the key, then sort the
      // keys in descending order (sortByKey(false) = descending).
      val sortCountWord = wordCount.map(countWord => (countWord._2, countWord._1)).sortByKey(false)
      sortCountWord.foreach(println(_))

      // Swap back to (word, count), preserving the descending-count order.
      val sortWordCount = sortCountWord.map(countWord => (countWord._2, countWord._1))
      sortWordCount.foreach(println(_))
    } finally {
      // Always release the SparkContext so the application shuts down cleanly
      // even if a stage above throws.
      sc.stop()
    }
  }
}
