package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo21SparkPI {

  /**
   * Monte-Carlo estimation of PI with Spark.
   *
   * Scatters random points over the square (-1, 1) x (-1, 1) and counts how
   * many land inside the unit circle; that ratio approximates PI / 4.
   *
   * @param args optional first argument: the number of tasks (RDD partitions);
   *             defaults to 1 when absent. Must parse as an Int.
   */
  def main(args: Array[String]): Unit = {
    // Number of tasks (partitions), taken from the first CLI argument when given.
    // `val` + if-expression instead of a mutable var.
    val taskNum: Int = if (args.nonEmpty) args(0).toInt else 1

    // Demo: locally generate and print 100 random points in (-1, 1) x (-1, 1).
    for (_ <- 1 to 100) {
      println((Random.nextDouble() * 2 - 1, Random.nextDouble() * 2 - 1))
    }

    val range: Range.Inclusive = 1 to 100
    println(range.toList)

    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo21SparkPI")
    conf.setMaster("local[*]")

    val sc: SparkContext = new SparkContext(conf)

    // Number of points generated per partition.
    val pointNumPerMap: Int = 10000000

    // Total point count in Long arithmetic: the original Int product silently
    // overflowed for taskNum > 214, producing an empty range and PI = 0.0.
    // Fail fast instead, since a Range is still bounded by Int.MaxValue.
    val totalPoints: Long = pointNumPerMap.toLong * taskNum
    require(
      totalPoints > 0 && totalPoints <= Int.MaxValue,
      s"total point count $totalPoints exceeds Int range; reduce taskNum"
    )

    // Build the RDD of indices. Pass the Range directly instead of calling
    // .toList: a Range is a lazy Seq, so we avoid materializing hundreds of
    // millions of boxed Ints in driver memory.
    val rdd: RDD[Int] = sc
      .parallelize(1 to totalPoints.toInt, taskNum)

    // getNumPartitions is a property of the RDD, not a transformation/action.
    println(rdd.getNumPartitions)

    val nums: Long = rdd
      // Map each index to a random point in (-1, 1) x (-1, 1).
      .map(_ => (Random.nextDouble() * 2 - 1, Random.nextDouble() * 2 - 1))
      // Keep only the points inside the unit circle (squared distance <= 1;
      // no sqrt needed since the radius is 1).
      .filter { case (x, y) => x * x + y * y <= 1 }
      // Count the points that fell inside the circle.
      .count()

    // PI ~= 4 * (points inside circle) / (total points).
    val PI: Double = nums * 4 / totalPoints.toDouble

    println(s"PI的值为：$PI")

    // Release cluster resources held by the SparkContext.
    sc.stop()
  }

}
