package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.math.random

/**
 * Estimates PI with a Monte-Carlo simulation on Spark.
 *
 * Method: sample `pointNum` random points uniformly in the square
 * [-1, 1] x [-1, 1]; the fraction landing inside the unit circle
 * (x^2 + y^2 <= 1) approximates (area of circle) / (area of square)
 * = PI / 4, so PI ≈ 4 * inCircle / total.
 */
object Demo26PI {
  def main(args: Array[String]): Unit = {
    // Total number of sample points (fits in Int: 1e9 < Int.MaxValue).
    val pointNum: Int = 1000000000

    val conf: SparkConf = new SparkConf()
    conf.setMaster("local[*]")
    conf.setAppName("Demo26PI")

    val sc: SparkContext = new SparkContext(conf)

    // Distribute the sample indices over 10 partitions; the index values
    // themselves are unused — each task just draws its own random points.
    val rdd: RDD[Int] = sc.parallelize(0 until pointNum, numSlices = 10)

    val pointInCircleNum: Long = rdd
      // random() yields a Double in [0, 1); scale to [-1, 1) for each axis.
      // Explicit () avoids the deprecated 0-arity auto-application in 2.13.
      .map(_ => (random() * 2 - 1, random() * 2 - 1))
      // Squared distance from the origin (no sqrt needed to test <= 1).
      .map(kv => kv._1 * kv._1 + kv._2 * kv._2)
      .filter(_ <= 1)
      .count()

    // PI = 4 * (points inside the circle) / (all points)
    println(s"Pi的值为：${4.0 * pointInCircleNum / pointNum}")

    // Keep the driver alive so the Spark web UI (port 4040) stays reachable.
    // Sleep instead of busy-spinning so we don't burn a CPU core.
    while (true) {
      Thread.sleep(10000)
    }

  }

}
