package com._51doit.spark01

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object WordCount {

  /**
   * Classic Spark word count driver.
   *
   * Reads a text file, splits lines on single spaces, counts occurrences of
   * each word, sorts by count descending, and writes the (word, count) pairs
   * as text to the output path.
   *
   * @param args exactly two arguments: args(0) = input path, args(1) = output path
   */
  def main(args: Array[String]): Unit = {
    if (args.length != 2) {
      // Usage goes to stderr; original message was garbled
      // ("Usage :...WordCount<input><output>" — no spaces between placeholders).
      System.err.println("Usage: com._51doit.spark01.WordCount <input> <output>")
      sys.exit(1)
    }

    // Destructure the two expected arguments.
    val Array(input, output) = args

    // Master/app-name are expected to be supplied externally (spark-submit).
    val conf: SparkConf = new SparkConf()
    val sc: SparkContext = new SparkContext(conf)

    // Ensure the SparkContext is always released, even if a stage fails.
    try {
      // Read the input file as lines.
      val lines: RDD[String] = sc.textFile(input)

      // Split each line on spaces and flatten into individual words.
      val words: RDD[String] = lines.flatMap(_.split(" "))

      // Pair each word with an initial count of 1.
      val pairs: RDD[(String, Int)] = words.map((_, 1))

      // Sum counts per word.
      val counts: RDD[(String, Int)] = pairs.reduceByKey(_ + _)

      // Sort by count, highest first.
      val sorted: RDD[(String, Int)] = counts.sortBy(-_._2)

      // Write results as text files under the output path.
      sorted.saveAsTextFile(output)
    } finally {
      sc.stop()
    }
  }

}
