package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.immutable
import scala.util.Random

object Demo18PI {

  /**
   * Estimates PI with the Monte Carlo method on Spark.
   *
   * Uniformly samples points in the unit square [0, 1) x [0, 1), shifts them
   * onto the square [-1, 1) x [-1, 1) centered at the origin, and counts how
   * many land inside the unit circle. Since area(circle) / area(square)
   * = PI / 4, the ratio of hits to total samples times 4 approximates PI.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("pi")
      .setMaster("local")

    val sc = new SparkContext(conf)

    try {
      // Single source of truth for the sample count; previously the literal
      // and `list.size` could silently drift apart.
      val sampleCount = 100000000
      val list: Range = 0 until sampleCount

      val listRDD: RDD[Int] = sc.parallelize(list)

      // One uniformly random point in [0, 1) x [0, 1) per sample.
      // The index value is irrelevant, so it is discarded with `_`.
      val pointRDD: RDD[(Double, Double)] = listRDD.map(_ => {
        val x: Double = Random.nextDouble()
        val y: Double = Random.nextDouble()
        (x, y)
      })

      // Map [0, 1)^2 onto [-1, 1)^2 and keep points inside the unit circle
      // (distance from the origin at most 1).
      val insideCircle: RDD[(Double, Double)] = pointRDD.filter(point => {
        val x: Double = point._1 * 2 - 1
        val y: Double = point._2 * 2 - 1
        x * x + y * y <= 1
      })

      // hits / total ≈ PI / 4, hence the factor of 4.
      val pi: Double = insideCircle.count().toDouble / sampleCount.toDouble * 4

      println(pi)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
