package com.scala.learn.wordcount

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}


object ScalaWordCount {
  /** Spark word-count driver: reads a text file, counts word occurrences,
    * and writes the (word, count) pairs sorted by count in descending order.
    *
    * @param args args(0) = input file path, args(1) = output directory path.
    *             When fewer than two arguments are supplied, local test
    *             paths are used as a fallback (handy for IDE runs).
    */
  def main(args: Array[String]): Unit = {
    // Honor command-line arguments when provided; only fall back to the
    // hard-coded local paths otherwise. (Previously a local `val args`
    // unconditionally shadowed the parameter, making CLI input dead code.)
    val paths =
      if (args.length >= 2) args
      else Array("D:\\tmp\\log.txt", "D:\\tmp\\log2.txt")

    // Application configuration; `local` master allows standalone runs.
    val conf = new SparkConf().setAppName("ScalaWordCount").setMaster("local")
    // SparkContext is the entry point of a Spark program.
    val sc = new SparkContext(conf)
    try {
      // Read the input file as an RDD of lines.
      val lines: RDD[String] = sc.textFile(paths(0))
      // Split each line on single spaces into individual words.
      val words: RDD[String] = lines.flatMap(_.split(" "))
      // Pair each word with an initial count of 1.
      val wordsAndOne: RDD[(String, Int)] = words.map(w => (w, 1))
      // Sum the counts per word (reduceByKey comes from PairRDDFunctions).
      val reduced: RDD[(String, Int)] = wordsAndOne.reduceByKey(_ + _)
      // Sort by count, descending; named arg documents the boolean's meaning.
      val sorted: RDD[(String, Int)] = reduced.sortBy(_._2, ascending = false)
      // Persist the sorted results to the output directory.
      sorted.saveAsTextFile(paths(1))
    } finally {
      // Release Spark resources even when the job fails part-way.
      sc.stop()
    }
  }
}
