package cn.rslee.java.demos.test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.omg.DynamicAny._DynAnyStub

/** Demo of RDD `reduce`, and computing a max either globally or
  * per-partition via `glom()` (which materializes each partition as an Array).
  */
object Reduce {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("ReduceByKey").setMaster("local"))
    try {
      // Sum 1..4 with a distributed reduce (single partition here).
      val rdd = sc.parallelize(Array(1, 2, 3, 4), 1)
      println(rdd.reduce(_ + _))

      val dataList = List(50.0, 40.0, 40.0, 70.0)
      val dataRDD = sc.makeRDD(dataList)

      // Global max in one reduce across all elements.
      val maxValue = dataRDD.reduce(_ max _)
      println(s"maxValue1=$maxValue")

      // Same result computed two-stage: max within each partition (glom
      // turns a partition into an Array), then max across partition maxima.
      // NOTE(review): Array.max throws on an empty partition — fine here
      // because every partition of this small RDD is non-empty.
      val maxValue2 = dataRDD.glom().map(_.max).reduce(_ max _)
      println(s"maxValue2=$maxValue2")
      // Print partition contents; mkString avoids Array's opaque
      // toString ("[D@1a2b3c") that a bare println would produce.
      dataRDD.glom().foreach(part => println(part.mkString(", ")))

      // Show how 1..100 is split across 3 partitions.
      val a = sc.parallelize(1 to 100, 3)
      a.glom().collect().foreach(part => println(part.mkString(", ")))
    } finally {
      // Always release the SparkContext, even if a job above fails.
      sc.stop()
    }
  }
}