package com.dxf.day01

import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Classic Spark word-count example.
 *
 * Reads a text file, splits it into words on whitespace, counts occurrences
 * of each word, prints the counts, then prints them again sorted by count
 * in descending order.
 */
object WordCount {

  /**
   * Entry point.
   *
   * @param args optional; args(0) may override the input file path
   *             (defaults to "datas/2.txt" for backward compatibility).
   */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO/WARN logging so only errors reach the console.
    Logger.getLogger("org").setLevel(Level.ERROR)

    val conf = new SparkConf()
      .setAppName("wordcount")
      .setMaster("local")

    val sc = new SparkContext(conf)

    // Allow the input path to be supplied on the command line; keep the
    // original hard-coded path as the default.
    val inputPath = if (args.nonEmpty) args(0) else "datas/2.txt"

    try {
      val data = sc.textFile(inputPath)

      // \s+ matches one or more whitespace characters (spaces, tabs, ...).
      val words = data.flatMap(_.split("\\s+"))

      // Pair each word with an initial count of 1: (word, 1).
      val wordOne = words.map((_, 1))

      // Sum the counts per word.
      val wordCount = wordOne.reduceByKey(_ + _)
      wordCount.foreach(println)

      // Sort by count, descending.
      println("排序===")
      val sort = wordCount.sortBy(-_._2)
      sort.foreach(println)

      // map: applies a function to each element of the collection.
      // flatMap: applies a function to each element, then flattens the results.
    } finally {
      // Always stop the SparkContext so local executor resources are released,
      // even if the job above throws.
      sc.stop()
    }
  }

}
