package com.ustcinfo.study.scala.r1.panwentao

import org.apache.spark.{SparkConf, SparkContext}

object panwentao {

  /**
   * Word-count driver: reads a text file, counts word occurrences,
   * and prints the ten most frequent words as (count, word) pairs.
   *
   * @param args optional; args(0), when present, overrides the default input path
   */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setMaster("local").setAppName("test2")
    // Initialize the Spark context
    val sc = new SparkContext(sparkConf)
    try {
      // Input path: first CLI argument if supplied, otherwise the original hard-coded default
      val inputPath = if (args.nonEmpty) args(0) else "file:///E:\\programmingGuide"
      // Load the file as an RDD of lines
      val txtRdd = sc.textFile(inputPath)
      txtRdd
        // Drop blank lines
        .filter(_.trim.nonEmpty)
        // Split each line on spaces and flatten into individual words
        // (fuses the original map(split) + flatMap(identity) into one step)
        .flatMap(_.split(" "))
        // Pair every word with an initial count of 1
        .map((_, 1))
        // Sum the counts per word
        .reduceByKey(_ + _)
        // Swap to (count, word) so the count becomes the sort key
        .map(_.swap)
        // Sort by count descending, take the top ten, print each pair
        .sortByKey(ascending = false)
        .take(10)
        .foreach(println)
    } finally {
      // Release Spark resources even if the job fails (missing in the original — resource leak)
      sc.stop()
    }
  }

}
