package org.wj.arithmetic

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object CombineByKey {

  /**
   * Demonstrates computing a per-key average score with `combineByKey`.
   *
   * The combiner tracks a running `(count, sum)` pair per key; a final
   * `mapValues` divides sum by count to produce the average.
   *
   * NOTE(review): switched from `extends App` to an explicit `main` —
   * the `App` trait initializes fields via `delayedInit`, a known pitfall
   * with Spark closure serialization and non-trivial entry points.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName(this.getClass.getName)
    val sc   = new SparkContext(conf)

    try {
      // (class name, score) sample records; keys repeat so combineByKey has work to do.
      val rdd: RDD[(String, Float)] = sc.parallelize(Seq(
        ("班级一", 93f), ("班级二", 94f), ("班级一", 94f), ("班级二", 93f),
        ("班级二", 94f), ("班级二", 94f), ("班级二", 94f), ("班级二", 94f)
      ))

      // createCombiner: first score seen for a key -> (count = 1, sum = score)
      // mergeValue:     fold another score from the same partition into the accumulator
      // mergeCombiners: combine partial (count, sum) pairs across partitions
      val countAndSum: RDD[(String, (Int, Float))] = rdd.combineByKey(
        (score: Float) => (1, score),
        (acc: (Int, Float), score: Float) => (acc._1 + 1, acc._2 + score),
        (a: (Int, Float), b: (Int, Float)) => (a._1 + b._1, a._2 + b._2)
      )

      // BUG FIX: the original was named `classScoreAvg` but stopped at the
      // (count, sum) pair and never divided — compute the actual average.
      val classScoreAvg: RDD[(String, Float)] = countAndSum.mapValues {
        case (count, sum) => sum / count
      }

      classScoreAvg.foreach(println)
    } finally {
      sc.stop() // release the SparkContext even if the job fails
    }
  }
}
