package com.spark.statistics

import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.stat.Statistics
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2017/8/22.
  */
/**
  * Demo of Spark MLlib basic statistics plus RDD `aggregate` with
  * different within-partition and cross-partition functions.
  */
object basestatistics {

  /**
    * Entry point: prints column summary statistics (mean, variance,
    * non-zero count) for a small numeric dataset, then demonstrates
    * `RDD.aggregate` with a min-based seqOp and a sum-based combOp.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("statistics")
    val sc = new SparkContext(conf)
    try {
      // Wrap each Int in a 1-element dense vector so colStats can
      // compute per-column summaries (Int widens to Double here).
      val rdd = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9)).map(i => Vectors.dense(i))

      // Column-wise summary statistics over the whole RDD.
      val summary = Statistics.colStats(rdd)
      println(summary.mean)
      println(summary.variance)
      println(summary.numNonzeros)

      // aggregate(zeroValue = 3) over 4 partitions:
      // `seq` folds each partition to a running minimum (seeded with 3),
      // `comb` then sums the per-partition results (the zero value is
      // also applied once on the driver side, per the aggregate contract).
      val rdd1 = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9), 4)
      val res = rdd1.aggregate(3)(seq, comb)
      println(res)
    } finally {
      // Always release the SparkContext, even if an action above fails.
      sc.stop()
    }
  }

  /** Within-partition function for `aggregate`: running minimum of the
    * accumulator and the next element. Logs each application. */
  def seq(a: Int, b: Int): Int = {
    println(s"seq:$a:$b")
    math.min(a, b)
  }

  /** Cross-partition combiner for `aggregate`: sums two partial results.
    * Logs each application. */
  def comb(a: Int, b: Int): Int = {
    println(s"comb:$a:$b")
    a + b
  }

}
