package com.yanggu.spark.core.rdd.transform.keyvalue

import org.apache.spark.{SparkConf, SparkContext}

// Key-Value type RDD transformation: the combineByKey operator
object RDD21_CombineByKey {

  def main(args: Array[String]): Unit = {

    // 1. Spark configuration: local mode using all available cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("spark")

    // 2. Spark context — entry point for building RDDs.
    val sc = new SparkContext(conf)

    // 3. An in-memory RDD of (key, score) pairs spread over 2 partitions.
    val scores = sc.makeRDD(List(("a", 88), ("b", 95), ("a", 91), ("b", 93), ("a", 95), ("b", 98)), 2)

    // 4. combineByKey — compute the average score per key.
    //    def combineByKey[C](
    //      createCombiner: V => C,        // first time a key is seen in a partition: seed (sum, count)
    //      mergeValue: (C, V) => C,       // fold another value for that key into the running (sum, count)
    //      mergeCombiners: (C, C) => C    // merge the per-partition (sum, count) accumulators
    //    ): RDD[(K, C)]
    val sumAndCount = scores.combineByKey(
      v => (v, 1),
      (acc: (Int, Int), v: Int) => (acc._1 + v, acc._2 + 1),
      (left: (Int, Int), right: (Int, Int)) => (left._1 + right._1, left._2 + right._2)
    )

    // Turn each (sum, count) into the per-key average; mapValues keeps the keys untouched.
    val averages = sumAndCount.mapValues { case (sum, count) => sum.toDouble / count }

    // 5. Run the job and print each (key, average) pair to stdout.
    averages.collect().foreach(println)

    // 6. Release cluster resources.
    sc.stop()
  }

}
