import org.apache.spark.{SparkConf, SparkContext}

/**
  * @author td
  * @date 2018/3/26
  */
object ReduceDemo {

  /**
    * Entry point: builds a tiny 2-partition RDD of strings and reduces it
    * to its maximum element (lexicographic `String` ordering via `>`).
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // App name matches this object (was mistakenly "WordCount" — a copy-paste leftover).
    // local master is intentional: this is a single-JVM demo.
    val conf = new SparkConf().setAppName("ReduceDemo").setMaster("local")
    val sc = new SparkContext(conf)
    // Ensure the SparkContext is always stopped, even if an action throws.
    try {
      val reduceRdd = sc.parallelize(List("a", "b", "c", "d"), 2)

      // Expected: 2 (the explicit numSlices argument above).
      println(reduceRdd.partitions.length)

      // reduce is safe here because the RDD is known non-empty;
      // on a possibly-empty RDD this would throw UnsupportedOperationException.
      val max = reduceRdd.reduce((a, b) => if (a > b) a else b)

      println(max)
    } finally {
      sc.stop()
    }
  }

}
