package com.bdqn.spark.chapter05.kv

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates the `aggregateByKey` transformation on a pair RDD, and shows
 * that when the intra-partition and inter-partition functions are the same,
 * the call simplifies to `foldByKey`.
 */
object Spark21_RDD_Operator_Transform {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("operator-aggregateByKey")
    val sc = new SparkContext(conf)

    // Operators - (Key-Value type)

    // Four ("a", n) pairs spread across 2 partitions.
    val pairs = sc.makeRDD(
      List(("a", 1), ("a", 2), ("a", 3), ("a", 4)),
      2
    )

    // aggregateByKey: zero value 0; first function combines values within a
    // partition, second merges the per-partition results across partitions.
    val summed = pairs.aggregateByKey(0)(
      _ + _, // intra-partition combine
      _ + _  // inter-partition merge
    )
    summed.collect().foreach(println)

    // When both combine functions coincide, aggregateByKey can be
    // written more concisely as foldByKey.
    pairs.foldByKey(0)(_ + _).collect().foreach(println)


    sc.stop()
  }
}
