package com.hzh.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

/**
 * Monte-Carlo estimation of PI with Spark.
 *
 * Generates random points uniformly in the square [-1, 1] x [-1, 1] and
 * counts the fraction that lands inside the unit circle; that fraction
 * approximates (area of circle) / (area of square) = PI / 4.
 */
object Demo20PI {
  def main(args: Array[String]): Unit = {
    // Build the Spark environment (single-threaded local master for this demo).
    val conf = new SparkConf()
    conf.setAppName("Demo20PI")
    conf.setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // One range element per random sample; 0 to 100000 inclusive => 100001 samples.
      val list: Range.Inclusive = 0 to 100000
      // Distribute the sample indices as an RDD.
      val rdd: RDD[Int] = sc.parallelize(list)

      // For each element, draw a random point (x, y) in [-1, 1] x [-1, 1].
      // NOTE: this RDD is non-deterministic — re-evaluating it produces
      // different points, so it must only be consumed by a single action.
      val squareRDD: RDD[(Double, Double)] = rdd.map(_ => {
        val x: Double = Random.nextDouble() * 2 - 1
        val y: Double = Random.nextDouble() * 2 - 1
        (x, y)
      })

      // Keep only the points strictly inside the unit circle.
      val circleRDD: RDD[(Double, Double)] = squareRDD.filter {
        case (x, y) =>
          x * x + y * y < 1
      }

      // The total sample count is already known on the driver (list.size).
      // Using it avoids a second `count()` action that would re-run the
      // whole uncached random map stage just to recompute this number.
      val totalSamples: Long = list.size.toLong
      val pi: Double = 4.0 * circleRDD.count() / totalSamples

      println(pi)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
