package cn.hnu.spark

import org.apache.commons.lang3.StringUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object RddDemo03 {

  /**
   * Demonstrates key-less aggregation operations on an `RDD[Int]`:
   * `sum`, `reduce`, `fold` (with an initial value) and `aggregate`
   * (per-partition combine followed by a global combine).
   */
  def main(args: Array[String]): Unit = {
    // Create the Spark context (local mode, 2 worker threads).
    val conf: SparkConf = new SparkConf().setAppName("Rdd-demo").setMaster("local[2]")
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")
    try {
      // Key-less aggregation over the numbers 1..10.
      val rdd1: RDD[Int] = sc.parallelize(1 to 10)

      // sum() always returns Double, even for an RDD[Int].
      val sum: Double = rdd1.sum()
      // reduce: pairwise combination with no initial value.
      val sum1: Int = rdd1.reduce(_ + _)
      // fold: like reduce but with a zero value. NOTE: the zero is applied
      // once per partition AND once for the global merge, so it must be the
      // identity of the operation (0 for addition) to give a correct result.
      val sum2: Int = rdd1.fold(0)(_ + _)
      // aggregate: zero value, per-partition combiner, then global combiner.
      // With local[2] the two partitions hold roughly 1..5 and 6..10.
      val sum3: Int = rdd1.aggregate(0)(_ + _, _ + _)

      // Print the results so the demo actually shows its output
      // (previously they were computed and silently discarded).
      println(s"sum=$sum, reduce=$sum1, fold=$sum2, aggregate=$sum3")
    } finally {
      // Always release the SparkContext, even if an action fails
      // (fixes the missing-shutdown resource leak in the original).
      sc.stop()
    }
  }
}
