package com.xiaojiezhu.spark.rdd.action

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Demonstrates RDD aggregation with `reduce` and `fold`.
  *
  * @author Zhu Xiaojie (朱小杰)
  *         Date: 2017-11-26 19:24
  */
object ScalaReduce {

  /**
    * Entry point: builds a local SparkContext, sums a small RDD with both
    * `reduce` and `fold`, prints the results, and stops the context.
    *
    * Note: `RDD.fold(zero)` applies the zero value once per partition and
    * once more when merging the per-partition results, so its value depends
    * on the partition count (unlike `scala.collection` fold).
    *
    * @param arg command-line arguments (unused)
    */
  def main(arg: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("app")
    val sc = new SparkContext(conf)
    // Ensure the context is always stopped, even if an action throws;
    // the original code leaked the SparkContext.
    try {
      val rdd1 = sc.parallelize(List(1, 2, 3, 4))

      // 1 + 2 + 3 + 4 = 10
      val result = rdd1.reduce(_ + _)

      // zero = 10 is added per partition and once at the driver merge,
      // so with a single partition this yields 10 + (10 + 10) = 30.
      val r2 = rdd1.fold(10)(_ + _)

      println(result)
      println(r2)
    } finally {
      sc.stop()
    }
  }
}
