package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo17PI {

  /** Estimates Pi with a Monte Carlo simulation on Spark.
    *
    * Points are sampled uniformly in the square [-1, 1) x [-1, 1); the
    * fraction landing inside the unit circle approximates Pi / 4.
    *
    * @param args optional first argument: number of sample points
    *             (defaults to 1,000,000,000). NOTE(review): parsed with
    *             `toLong`, so a non-numeric argument throws — acceptable
    *             for a demo entry point.
    */
  def main(args: Array[String]): Unit = {
    // Total number of random sample points; overridable from the command line.
    val numPoints: Long = args.headOption.map(_.toLong).getOrElse(1000000000L)

    val conf = new SparkConf()
    conf.setMaster("local[8]")
    conf.setAppName("pi")

    val sc = new SparkContext(conf)

    try {
      // One element per sample point, spread over 8 partitions.
      // sc.range avoids materializing a driver-side collection and
      // supports Long-sized sample counts.
      val samplesRDD: RDD[Long] = sc.range(0L, numPoints, numSlices = 8)

      // Generate each point and test it in a single pass, then count the
      // hits inside the unit circle. This runs ONE Spark job; the huge
      // point RDD never needs to be cached or recomputed, and the
      // denominator is simply the known sample size.
      val insideCircleCount: Long = samplesRDD
        .map { _ =>
          // Random x and y, each uniform in [-1, 1).
          val x = Random.nextDouble() * 2 - 1
          val y = Random.nextDouble() * 2 - 1
          (x, y)
        }
        .filter { case (x, y) => x * x + y * y <= 1 }
        .count()

      // Pi ≈ 4 * (points inside circle / total points).
      val pi: Double = insideCircleCount.toDouble / numPoints * 4

      println(s"PI is $pi")
    } finally {
      // Release the SparkContext even if the job fails.
      sc.stop()
    }
  }

}
