package rdd

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of basic numeric aggregations over an RDD of the odd numbers 1..19.
 *
 * Shows both the per-aggregate RDD actions (sum/max/min/count/mean/variance/stdev),
 * each of which triggers its own job, and `stats()`, which computes all of them
 * in a single pass and returns a StatCounter.
 */
object RDD_ACC {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName("RDD_ACC")

    val sc = new SparkContext(conf)
    // Ensure the SparkContext is always shut down, even if an action fails.
    try {
      val rdd = sc.parallelize(1 to 20 by 2)
      println("RDD中的元素：" + rdd.collect().mkString(","))
      println("RDD中所有元素的和：" + rdd.sum())
      println("RDD中所有元素的最大值：" + rdd.max())
      // Fixed label: this line calls min(), previous label wrongly said 最大值 (max).
      println("RDD中所有元素的最小值：" + rdd.min())
      println("RDD中所有元素的个数：" + rdd.count())
      println("RDD中所有元素的平均值：" + rdd.mean())
      println("RDD中所有元素的方差：" + rdd.variance())
      println("RDD中所有元素标准差：" + rdd.stdev())

      // stats() computes all of the above aggregates in one pass over the RDD.
      // The original code computed these values and discarded them; print them
      // so the single-pass API is actually demonstrated.
      val statCounter = rdd.stats()
      println("StatCounter 最大值：" + statCounter.max)
      println("StatCounter 最小值：" + statCounter.min)
      println("StatCounter 个数：" + statCounter.count)
      println("StatCounter 平均值：" + statCounter.mean)
      println("StatCounter 方差：" + statCounter.variance)
      println("StatCounter 标准差：" + statCounter.stdev)
    } finally {
      sc.stop()
    }
  }
}
