package sparkExamples

import org.apache.spark.{SparkConf, SparkContext}
import java.lang.Math.random

import org.apache.spark.rdd.RDD

object SparkPi {

  /** Estimates π with a Monte Carlo simulation on a local Spark cluster.
   *
   * Usage: the optional first CLI argument sets the number of partitions
   * (default 10); 100 random points are sampled per partition. Throws
   * `NumberFormatException` if args(0) is present but not an integer.
   */
  def main(args: Array[String]): Unit = {

    val piConf = new SparkConf().setMaster("local[*]").setAppName("pi")
    val sc = new SparkContext(piConf)

    // Ensure the SparkContext is always released, even when the job fails
    // (the original leaked it: sc.stop() was never called).
    try {
      // Partition count from the first CLI argument, defaulting to 10.
      val numPartitions: Int = if (args.length > 0) args(0).toInt else 10

      // Total sample size: 100 points per partition.
      val numPoints: Int = numPartitions * 100

      val rdd = sc.parallelize(1 to numPoints, numPartitions)

      // For each sample, draw (x, y) uniformly in [-1, 1) x [-1, 1) and
      // record 1 if the point lies inside the unit circle, else 0.
      val inOrOut: RDD[Int] = rdd.map { _ =>
        val x = random() * 2 - 1
        val y = random() * 2 - 1
        if (x * x + y * y < 1) 1 else 0
      }

      // Fraction of hits ≈ π/4, so π ≈ 4 * hits / samples.
      val hits = inOrOut.reduce(_ + _)

      println(s"π是：${4.0 * hits / numPoints}")
    } finally {
      sc.stop()
    }
  }
}
