package com.spark.statistics

import org.apache.spark.mllib.random.RandomRDDs
import org.apache.spark.mllib.random.RandomRDDs._
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2017/8/23.
  */
/**
  * Demo of Spark MLlib's [[RandomRDDs]] generators: draws samples from
  * several statistical distributions (normal, uniform, Poisson,
  * exponential, gamma) and prints each value to stdout.
  */
object random {

  /**
    * Entry point. Builds a local SparkContext, generates the sample RDDs,
    * prints their contents, and always stops the context on exit.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("statistics")
    val sc = new SparkContext(conf)

    // Guarantee the SparkContext is released even if an action below throws;
    // the original code leaked the context on any failure.
    try {
      // 100 i.i.d. samples from the standard normal distribution N(0, 1).
      val randomNum = normalRDD(sc, 100)
      randomNum.foreach(println)

      println("uniformRDD:")
      // 10 samples uniformly distributed on [0, 1].
      uniformRDD(sc, 10).foreach(println)

      println("poissonRDD:")
      // 10 samples from a Poisson distribution with mean 5.
      poissonRDD(sc, 5, 10).foreach(println)

      println("exponentialRDD:")
      // 10 samples from an exponential distribution with mean 7.
      exponentialRDD(sc, 7, 10).foreach(println)

      println("gammaRDD:")
      // 10 samples from a gamma distribution with shape 3 and scale 3.
      gammaRDD(sc, 3, 3, 10).foreach(println)
    } finally {
      // Side-effecting zero-arity method: invoke with explicit parentheses.
      sc.stop()
    }

  }


}
