package com.doit.day02

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * Demonstrates Spark RDD actions, focusing on `aggregate`.
 *
 * Key point illustrated below: `aggregate(zeroValue)(seqOp, combOp)` applies
 * the zero value once PER PARTITION (seqOp) and then AGAIN in the driver-side
 * combine (combOp) — so the zero value participates numPartitions + 1 times.
 */
object Demo07Action {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getSimpleName)
    val sc = SparkContext.getOrCreate(conf)

    // 6 elements spread over 3 partitions: (1,2) (3,4) (5,6)
    val rdd = sc.parallelize(List(1, 2, 3, 4, 5, 6), 3)

    // BUG FIX: the original called rdd.saveAsTextFile("") — an empty output
    // path is rejected by Hadoop's path validation and crashes the job before
    // the aggregate demo below ever runs. Supply a real path to enable it:
    // rdd.saveAsTextFile("output/demo07")

    // BUG FIX: the original had `rdd.sortBy(e => e)` with its result discarded.
    // sortBy is a lazy transformation; an unused return value makes it a
    // silent no-op, so it has been removed.

    /* With zero value 10 and 3 partitions, each seqOp starts from 10:
         10 * 1 * 2 = 20
         10 * 3 * 4 = 120
         10 * 5 * 6 = 300
       then combOp folds the partition results starting from 10 again:
         10 * 20 * 120 * 300 = 7,200,000
     */

    // Other actions, kept for reference:
    // rdd.foreach(println)
    // rdd.take(3).toList.foreach(println)
    // println(rdd.first())
    // val i = rdd.min()
    // val i = rdd.max()
    // println(rdd.count())
    // val ints: Array[Int] = rdd.collect
    // val intToInt: collection.Map[Int, Int] = rdd.zip(rdd).collectAsMap()
    /* val i = rdd.reduce(_ + _)
       println(i)
     */

    val res = rdd.aggregate(10)(_ * _, _ * _)

    println(res)

    // BUG FIX: stop the SparkContext so the application releases its
    // resources and exits cleanly (the original never stopped it).
    sc.stop()
  }

}
