package com.bigdata.spark.core.rdd.operator.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * @author : ranzlupup
 * @date : 2023/3/1 16:14
 */
/**
 * Demonstrates that `foldByKey` is shorthand for `aggregateByKey` when the
 * intra-partition and inter-partition combine functions are the same.
 *
 * Both runs below sum the values per key over the same 2-partition RDD, so
 * they print identical (key, sum) pairs.
 */
object RDD_Transform_KV_foldByKey {
    def main(args: Array[String]): Unit = {
        val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD")
        val sc: SparkContext = new SparkContext(conf)

        // Source pairs spread over 2 partitions; keys "a" and "c" repeat.
        val pairs: RDD[(String, Int)] =
            sc.makeRDD(List(("a", 1), ("b", 2), ("c", 3), ("a", 2), ("c", 5)), 2)

        // aggregateByKey: zero value 0, and the SAME function for both the
        // per-partition merge and the cross-partition merge.
        val sum = (x: Int, y: Int) => x + y
        pairs
            .aggregateByKey(0)(sum, sum)
            .collect()
            .foreach(println)

        println("=======================")

        // foldByKey: the simplified form — one zero value, one combine function.
        pairs
            .foldByKey(0)(sum)
            .collect()
            .foreach(println)

        sc.stop()
    }
}
