package com.xbai.spark.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * `foldByKey` operator demo.
  *
  * Purpose: `foldByKey` is a simplified form of `aggregateByKey` where the
  * sequence operation (seqOp) and the combine operation (combOp) are the same
  * function.
  * Signature: `(zeroValue: V)(func: (V, V) => V): RDD[(K, V)]`
  * Example: create a pair RDD and sum the values that share the same key.
  *
  * @author xbai
  * @Date 2020/12/31
  */
object Spark19_FoldByKey {

  def main(args: Array[String]): Unit = {
    // local[*] uses all available cores on the local machine
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("foldByKey")
    val sc = new SparkContext(conf)

    try {
      // Pair RDD distributed across 3 partitions
      val rdd: RDD[(Int, Int)] = sc.parallelize(List((1,3),(1,2),(1,4),(2,3),(3,6),(3,8)),3)
      // Fold values per key starting from zeroValue 0; with `_ + _` this sums
      // all values for each key, e.g. key 1 -> 3+2+4 = 9
      val foldByKeyRDD: RDD[(Int, Int)] = rdd.foldByKey(0)(_ + _)
      foldByKeyRDD.collect().foreach(println)
    } finally {
      // Always release the SparkContext so the application shuts down cleanly
      // even if the job above throws
      sc.stop()
    }
  }
}
