package com.doit.day02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
object Demo04GroupByKey {

  /**
   * Word-count demo contrasting two shuffle strategies on the same data:
   *
   *   1. `groupByKey` + `mapValues(_.size)` — ships every (word, 1) pair
   *      across the network before counting (more shuffle data).
   *   2. `reduceByKey(_ + _)` — combines counts map-side first, so only
   *      partial sums are shuffled. Prefer this for aggregations.
   *
   * Both results are printed sorted by count, descending. Note that with
   * `local[*]` and `foreach(println)` the relative print order of lines
   * within one result set is not guaranteed across partitions.
   *
   * @param args optional first argument: input path (defaults to "data/words")
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    try {
      // Input path is configurable; falls back to the original hard-coded default.
      val inputPath = args.headOption.getOrElse("data/words")

      val lines: RDD[String] = sc.textFile(inputPath)

      // Split on any run of whitespace to tolerate tabs / multiple spaces.
      val words: RDD[String] = lines.flatMap(_.split("\\s+"))

      val pairs: RDD[(String, Int)] = words.map(w => (w, 1))

      // Strategy 1: groupByKey — collects all values per key, then counts.
      val grouped: RDD[(String, Iterable[Int])] = pairs.groupByKey()
      val wcGrouped: RDD[(String, Int)] = grouped.mapValues(_.size)
      wcGrouped.sortBy(_._2, ascending = false).foreach(println)

      // Strategy 2: reduceByKey — map-side combine makes this the idiomatic choice.
      pairs.reduceByKey(_ + _).sortBy(_._2, ascending = false).foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
