package sparkcore.day3.lesson02

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/4/24.
  */
object AggWordCount {

  /**
    * Two-phase ("salted") word count that mitigates data skew.
    *
    * Phase 1: each word is prefixed with a random salt (0 until saltBuckets)
    * so that a hot key such as "jump" is spread over several reduce keys and
    * partially aggregated in parallel.
    * Phase 2: the salt is stripped and the partial counts are summed into the
    * final per-word totals, which are printed to stdout.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("test")
    val sc = new SparkContext(conf)

    // Number of salt buckets each word is scattered across in phase 1.
    val saltBuckets = 3

    val array = Array("you,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump","jump,jump")
    val rdd = sc.parallelize(array)

    rdd.flatMap(_.split(","))
      .map { word =>
        // The scala.util.Random companion object is thread-safe; using it
        // avoids allocating a fresh Random instance for every record, which
        // the original `new util.Random()` inside the closure did.
        val prefix = scala.util.Random.nextInt(saltBuckets)
        (s"${prefix}_$word", 1)
      }
      .reduceByKey(_ + _) // phase 1: partial counts per salted key
      .map { case (saltedWord, count) =>
        // Split with limit 2 so only the salt prefix is removed; the original
        // split("_")(1) would truncate words that themselves contain '_'.
        (saltedWord.split("_", 2)(1), count)
      }
      .reduceByKey(_ + _) // phase 2: final counts per real word
      .foreach { case (word, count) =>
        println(s"$word  $count")
      }

    sc.stop()
  }

}
