package com.larry.spark.rdd.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object RDD_Oper_CombineByKey {

  /**
   * Demonstrates Spark's `combineByKey` by computing the average value per key.
   *
   * Builds a `(sum, count)` combiner per key, then divides to obtain the
   * average. Runs locally (`local[*]`) and prints one `(key, average)` pair
   * per line to stdout.
   */
  def main(args: Array[String]): Unit = {
    // TODO: demonstrate Spark combineByKey

    val conf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    val sc = new SparkContext(conf)

    // (key, score) pairs, spread across 2 partitions.
    val rdd = sc.makeRDD(
      List(("a", 88), ("b", 95), ("a", 91), ("b", 93), ("a", 95), ("b", 98)), 2
    )

    /*
     combineByKey parameters:
       createCombiner: V => C        — turns the first value seen for a key into a combiner
       mergeValue: (C, V) => C       — merges a value into the combiner within a partition
       mergeCombiners: (C, C) => C   — merges combiners across partitions
       partitioner: Partitioner      — controls repartitioning
       mapSideCombine: Boolean = true  — map-side pre-aggregation
       serializer: Serializer = null   — serialization

       Example trace for key "a" (one partition):
       ("a", 88)              -> ("a", (88, 1))
       then ("a", 91)         -> ("a", (88 + 91, 2))
       other partition ("a", 95) -> ("a", (95, 1)); merged via mergeCombiners.
     */
    // Aggregate each key into a (sum, count) pair.
    val sumCount: RDD[(String, (Int, Int))] = rdd.combineByKey(
      num => (num, 1),
      (acc: (Int, Int), v) => (acc._1 + v, acc._2 + 1),
      (a: (Int, Int), b: (Int, Int)) => (a._1 + b._1, a._2 + b._2)
    )

    // FIX: divide as Double — integer division truncated the average
    // (e.g. key "b": 286 / 3 == 95 instead of 95.33...).
    // (b,(286,3)) -> (b,95.33...)
    val avg = sumCount.mapValues { case (sum, count) => sum.toDouble / count }
    avg.collect().foreach(println)

    sc.stop()
  }
}
