package sparkcore.day3.lesson02

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/4/24.
  */
object GroupBykeyTest {

  /**
    * Demonstrates two-stage aggregation ("salting") with `groupByKey` to
    * mitigate data skew: each word is prefixed with a random salt for a
    * first partial count, then the salt is stripped and the partial counts
    * are combined into the final per-word totals, which are printed.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("test")
    val sc = new SparkContext(conf)

    try {
      // Heavily skewed input: "jump" occurs far more often than "you".
      val array = Array("you,jump") ++ Array.fill(16)("jump,jump")
      val rdd = sc.parallelize(array)

      // Stage 1: salt each word with a random prefix in [0, 3) so a hot key
      // is spread over up to 3 distinct reduce keys, then take a first
      // partial count per salted key. Use the shared Random singleton
      // instead of allocating a new Random per record.
      val saltedCounts: RDD[(String, Int)] = rdd
        .flatMap(_.split(","))
        .map { word =>
          val salt = scala.util.Random.nextInt(3)
          (salt + "_" + word, 1)
        }
        .groupByKey()
        .map { case (saltedWord, ones) => (saltedWord, ones.sum) }

      // Stage 2: strip the salt prefix and aggregate the partial counts
      // into the final total for each word.
      saltedCounts
        .map { case (saltedWord, count) => (saltedWord.split("_")(1), count) }
        .groupByKey()
        .map { case (word, counts) => (word, counts.sum) }
        .foreach { case (word, count) =>
          println(word + "  " + count)
        }
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
