package com.ustcinfo.study.scala.r4.xuzongbao

import org.apache.spark.{SparkConf, SparkContext}

/**
  * @author xuzongbao
  *
  */
/**
  * Word-count driver: reads a text file, counts word occurrences,
  * and prints the top 5 most frequent words as (count, word) pairs.
  *
  * Usage: an optional first argument overrides the input path;
  * defaults to "src/main/resources/test" for backward compatibility.
  */
object WordCount {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local").setAppName("xzb_wordcount")
    val sc = new SparkContext(sparkConf)
    // Allow the input path to be supplied on the command line; keep the
    // original hard-coded path as the default so existing invocations work.
    val inputPath = if (args.nonEmpty) args(0) else "src/main/resources/test"

    try {
      sc.textFile(inputPath)
        .flatMap(_.split("\\s+")) // split on runs of whitespace; avoids empty tokens from "a  b"
        .filter(_.nonEmpty)       // drop any residual empty strings so "" is never counted as a word
        .map(word => (word, 1))   // pair each word with an initial count of 1
        .reduceByKey(_ + _)       // sum counts per word
        .map { case (word, count) => (count, word) } // swap so the count becomes the sort key
        .sortByKey(ascending = false) // highest counts first
        .take(5)                  // top 5 (count, word) pairs
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
