package com.yanggu.spark.core.rdd.transform.keyvalue

import org.apache.spark.{SparkConf, SparkContext}

//Key-Value类型-FoldByKey算子
/**
 * Key-Value RDD transformation demo: `foldByKey`.
 *
 * When the intra-partition and inter-partition combine rules are the same,
 * `foldByKey(zero)(op)` is a concise substitute for `aggregateByKey`.
 *
 * With 2 partitions the sample data splits as:
 *   partition 0: ("a", 1), ("b", 2), ("b", 3)  =>  ("a", 1), ("b", 5)
 *   partition 1: ("a", 3), ("b", 4), ("a", 5)  =>  ("a", 8), ("b", 4)
 * and the inter-partition merge then sums per key.
 */
object RDD20_FoldByKey {

  def main(args: Array[String]): Unit = {
    // Local-mode Spark configuration and context.
    val conf = new SparkConf().setMaster("local[*]").setAppName("spark")
    val sc   = new SparkContext(conf)

    // Build the source RDD from an in-memory collection, split into 2 partitions.
    val pairs = sc.makeRDD(Array(("a", 1), ("b", 2), ("b", 3), ("a", 3), ("b", 4), ("a", 5)), 2)

    // Fold values per key with zero value 0 and addition for both the
    // in-partition and cross-partition combine steps.
    val summed = pairs.foldByKey(0)(_ + _)

    // Trigger the job and print each (key, total) pair.
    summed.collect().foreach(println)

    // Shut down the Spark context to release resources.
    sc.stop()
  }

}
