package com.catmiao.spark.rdd.operator.action

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @title: RDD_Operator_03_action
 * @projectName spark_study
 * @description: Demonstrates the difference between the fold and aggregate RDD actions.
 * @author ChengMiao
 * @date 2024/2/22 22:47
 */
object RDD_Operator_03_action {

  def main(args: Array[String]): Unit = {
    // Run Spark locally on all available cores.
    val conf = new SparkConf().setMaster("local[*]").setAppName("rdd")
    val sc   = new SparkContext(conf)

    // Four elements split across two partitions: [1, 2] and [3, 4].
    val numbers = sc.makeRDD(List(1, 2, 3, 4), 2)

    /**
     * aggregateByKey: the zero value participates only in the intra-partition
     * computation: [(10 + 3) + (10 + 7)]
     * aggregate: the zero value participates in both the intra-partition and
     * the inter-partition computation: [10 + (10 + 3) + (10 + 7)]
     */
    // val total: Int = numbers.aggregate(10)(_ + _, _ + _)

    // fold is shorthand for aggregate when the intra- and inter-partition
    // functions are identical; with 2 partitions this yields 40.
    val total: Int = numbers.fold(10)(_ + _)

    println(total)

    sc.stop()
  }

}
