package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.immutable
import scala.util.Random

/**
 * Estimates Pi with a Monte Carlo simulation on Spark.
 *
 * Throws `sampleCount` random points into the square [-1, 1] x [-1, 1] and
 * counts how many land inside the unit circle. Since
 * area(circle) / area(square) = Pi / 4, we have
 * Pi ≈ 4 * (points inside circle) / (total points).
 */
object Demo19SparkPi {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[4]")
    // Fixed: app name was "join" (copy-paste from another demo); this job estimates Pi.
    conf.setAppName("SparkPi")

    val sc = new SparkContext(conf)

    try {
      // Total number of random samples to generate.
      val sampleCount = 1000000

      // Build a large collection to drive the simulation.
      val list: immutable.Seq[Int] = 0 until sampleCount

      // Distribute the collection as an RDD across 8 partitions.
      val listRDD: RDD[Int] = sc.parallelize(list, 8)

      // Generate one random point per element, uniformly in [-1, 1] x [-1, 1].
      val pointsRDD: RDD[(Double, Double)] = listRDD.map(_ => {
        val x: Double = Random.nextDouble() * 2 - 1
        val y: Double = Random.nextDouble() * 2 - 1
        (x, y)
      })

      // Keep only the points inside the unit circle
      // (squared distance to the origin strictly less than 1).
      val insideRDD: RDD[(Double, Double)] = pointsRDD.filter {
        case (x, y) => x * x + y * y < 1
      }

      // Pi ≈ 4 * inside / total. Use the known sample count instead of
      // pointsRDD.count(), which would launch a second full Spark job and
      // regenerate every random point just to recompute a known number.
      val pi: Double = insideRDD.count().toDouble / sampleCount * 4.0

      println(s"PI = $pi")
    } finally {
      // Always release driver/executor resources, even if the job fails.
      sc.stop()
    }
  }

}
