package com.fwmagic.spark.core.transformations

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: compute the average score per person with `RDD.combineByKey`.
 *
 * For each key, `combineByKey` builds a `(count, sum)` accumulator, which is
 * then mapped to `sum / count` to get the average.
 */
object CombineByKey2Demo {
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf()
                .setAppName(this.getClass.getSimpleName)
                .setMaster("local[*]")

        val sc: SparkContext = new SparkContext(conf)

        // (name, score) pairs; each name appears several times.
        val list: Array[(String, Double)] = Array(("Fred", 88.0), ("Fred", 95.0), ("Fred", 91.0), ("Wilma", 93.0), ("Wilma", 95.0), ("Wilma", 98.0))

        val rdd: RDD[(String, Double)] = sc.parallelize(list)

        // Approach 1 (kept for reference):
        /*val combineByKeyRDD: RDD[(String, (Int, Double))] = rdd.combineByKey(
            x => (1, x),
            (a: (Int, Double), b: Double) => (a._1 + 1, a._2 + b),
            (m: (Int, Double), n: (Int, Double)) => (m._1 + n._1, n._2 + m._2)
        )

        val res: RDD[(String, Double)] = combineByKeyRDD.map(tp => {
            (tp._1, tp._2._2 / tp._2._1)
        })
        res.collect().foreach(println)*/

        // Approach 2: accumulate (count, sum) per key.
        val combineByKeyRDD: RDD[(String, (Int, Double))] = rdd.combineByKey(
            // createCombiner: first score seen for a key in a partition -> (count = 1, sum = score)
            score => (1, score),
            // mergeValue: fold another score from the same partition into the accumulator
            (c1: (Int, Double), newScore: Double) => (c1._1 + 1, c1._2 + newScore),
            // mergeCombiners: combine per-partition accumulators for the same key
            (c1: (Int, Double), c2: (Int, Double)) => (c1._1 + c2._1, c1._2 + c2._2)
        )

        // average = sum / count  (fixed misspelled local `socre` -> `score`;
        // `collect()` with parens for consistency with the commented variant above)
        combineByKeyRDD.map {
            case (name, (count, score)) => (name, score / count)
        }.collect().foreach(println)

        sc.stop()
    }

}
