package cn.spark.study.core

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

/*
 * Word count, with results sorted in descending order of frequency.
 */
object SortWordCount {

  // Original hard-coded input file; kept as the default so existing
  // invocations (no arguments) behave exactly as before.
  private val DefaultInputPath =
    "E://BI学习//Spark//(新升级)Spark 2.0从入门到精通：Scala编程、大数据开发、上百个实战案例、内核源码深度剖析//000.课程代码+软件包//第29讲-Spark核心编程：使用Java、Scala和spark-shell开发wordcount程序//文档//spark.txt"

  /**
   * Entry point: counts word occurrences in a text file and prints each
   * word with its count, most frequent first.
   *
   * @param args optional; args(0) may override the input file path.
   */
  def main(args: Array[String]): Unit = {
    val inputPath = if (args.nonEmpty) args(0) else DefaultInputPath

    val conf = new SparkConf().setAppName("SortWordCount").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      sc.textFile(inputPath, 1)
        .flatMap(_.split(" "))
        .map((_, 1))
        .reduceByKey(_ + _)
        // Swap to (count, word) so sortByKey can order by frequency.
        .map { case (word, count) => (count, word) }
        .sortByKey(ascending = false, numPartitions = 1)
        // Swap back to (word, count) for printing.
        .map { case (count, word) => (word, count) }
        .foreach { case (word, count) =>
          println(word + " appeared " + count + " times")
        }
    } finally {
      // Always release Spark resources, even if the job fails.
      sc.stop()
    }
  }
}
