package com.dxf.bigdata.D05_spark_again

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates `foldByKey`: folds values per key with the same function
 * applied both within each partition and across partitions, seeded by an
 * explicit zero value.
 *
 * With `zeroValue = 0` and `_ + _` this is equivalent to `reduceByKey(_ + _)`.
 */
object FoldByKey {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("app")

    val sc = new SparkContext(sparkConf)

    try {
      val pairs: RDD[(String, Int)] = sc.makeRDD(List(("a", 1), ("a", 2), ("c", 3), ("a", 4)))

      // Like reduceByKey, except an initial value (zeroValue) seeds the
      // fold inside every partition before partitions are merged.
      pairs.foldByKey(0)(_ + _).collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job above fails;
      // the original code leaked it by never calling stop().
      sc.stop()
    }
  }

}
