package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo15PI {

  /**
   * Monte Carlo estimation of π on Spark.
   *
   * Generates `nums * numSlices` random points in the square [-1, 1] x [-1, 1]
   * across `numSlices` partitions, counts how many land inside the unit circle,
   * and estimates π as 4 * inside / total.
   */
  def main(args: Array[String]): Unit = {
    // Build the Spark context (local mode, all cores).
    val conf: SparkConf = new SparkConf()
    // Fixed: app name was "Demo12MapValues" — a copy-paste leftover; keep it
    // consistent with the object name.
    conf.setAppName("Demo15PI")
    conf.setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    // Number of partitions (tasks) used to generate points in parallel.
    val numSlices: Int = 100

    // Points generated per slice; total sample size = nums * numSlices.
    val nums: Int = 1000000

    // One RDD element per sample point, spread over numSlices partitions.
    val numRDD: RDD[Int] = sc.parallelize(1 to nums * numSlices, numSlices)
    println(numRDD.getNumPartitions)

    // nextDouble() returns a value in [0, 1); scaling by 2 and shifting by -1
    // maps it to [-1, 1). Each element becomes one random point in the square.
    val inside: Long = numRDD.map { _ =>
      (Random.nextDouble() * 2 - 1, Random.nextDouble() * 2 - 1)
    }.filter {
      case (x: Double, y: Double) =>
        // Keep points inside (or on) the unit circle.
        (x * x + y * y) <= 1
    }.count() // number of points that fell inside the circle

    // π ≈ 4 * inside / total. Use floating-point division from the start:
    // the original `inside * 4 / numSlices / nums.toDouble` truncated with
    // Long integer division at the first step, discarding precision.
    val PI: Double = inside * 4.0 / numSlices / nums
    println(PI)

    // Release Spark resources before exiting.
    sc.stop()
  }

}
