package com.atbeijing.bigdata.spark.core.rdd.operator.action

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Spark04_Oper_Action {

    /**
     * Demonstrates Spark action operators: `aggregate` and `fold`.
     *
     * Key point: unlike `aggregateByKey`, the zero value passed to the
     * `aggregate` action participates in BOTH the intra-partition and the
     * inter-partition combine steps, so it is applied (numPartitions + 1) times.
     */
    def main(args: Array[String]): Unit = {

        val sparkConf = new SparkConf().setMaster("local[*]").setAppName("ActionOperator")
        val context = new SparkContext(sparkConf)

        // TODO action operators
        // TODO aggregate
        // TODO aggregateByKey (NOTE(review): transformation, not an action — listed here in the original; confirm intent)

        // Two partitions: [4, 2] and [3, 1].
        val numbers: RDD[Int] = context.makeRDD(List(4, 2, 3, 1), 2)

        // Zero value 5 is folded into each partition AND the final combine:
        //   partition 0: 5 + 4 + 2 = 11
        //   partition 1: 5 + 3 + 1 = 9
        //   driver:      5 + 11 + 9 = 25
        val aggregated: Int = numbers.aggregate(5)((acc, x) => acc + x, (a, b) => a + b)
        println(aggregated) // 25

        // When the intra- and inter-partition functions are the same,
        // `aggregate` simplifies to `fold`.
        val folded: Int = numbers.fold(5)((a, b) => a + b)
        println(folded) // 25

        context.stop()
    }
}
