package com.zhaosc.spark.core

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Demonstrates `groupByKey` vs `reduceByKey` on a small key/value RDD of scores.
 *
 * Runs locally (`local` master), prints each key with its grouped values,
 * then the per-key totals computed two ways.
 */
object GroupByKeyOperator {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("GroupByKeyOperator").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      val scoreList = Array(("xuruyun", 150), ("liangyongqi", 100),
        ("wangfei", 100), ("wangfei", 80))
      val scores = sc.parallelize(scoreList)

      /**
       * groupByKey collects every value of a key into one Iterable:
       * wangfei     -> [100, 80]
       * xuruyun     -> [150]
       * liangyongqi -> [100]
       */
      val groupedScores = scores.groupByKey()

      // BUG FIX: the original used `if (it.hasNext)` instead of a loop, so it
      // added only the FIRST value of each group (wangfei -> 100 instead of 180).
      // Sum the whole group so the result agrees with reduceByKey below.
      // NOTE: groupByKey-then-sum shuffles every value; reduceByKey (below) is
      // preferred for aggregation because it combines map-side first.
      val groupedScores2 = groupedScores.map { case (key, values) =>
        (key, values.sum)
      }

      // Per-key totals with map-side combining — the idiomatic aggregation.
      val reduceScores = scores.reduceByKey(_ + _)

      groupedScores.foreach { case (name, values) =>
        println(name)
        values.foreach(println)
        println("=========================")
      }

      groupedScores2.foreach { case (name, total) =>
        println(name)
        println(total)
        println("=========================")
      }

      reduceScores.foreach(println)
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}