package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

/**
 * Monte Carlo estimation of π with Spark.
 *
 * Generates uniformly random points in the square [-1, 1] x [-1, 1] and
 * counts how many fall inside the unit circle. The ratio of hits to total
 * samples approximates (area of circle) / (area of square) = π / 4,
 * so π ≈ 4 * hits / samples.
 */
object Demo17PI {
  def main(args: Array[String]): Unit = {

    // Spark configuration: application name and local execution mode.
    val conf = new SparkConf()
    conf.setAppName("pi")
    conf.setMaster("local")

    // Entry point for Spark code.
    val sc = new SparkContext(conf)

    try {
      // Number of random samples. The range 0 to 1e8 inclusive has n + 1
      // elements; keep the sample count in one place so the numerator and
      // denominator of the estimate cannot drift apart.
      val samples: Long = 100000001L
      val list: Range.Inclusive = 0 to 100000000

      // Distribute the index range as a (large) RDD; each element drives
      // one random sample on an executor.
      val rdd: RDD[Int] = sc.parallelize(list)

      // Map each index to a random point (x, y), each coordinate uniform
      // in [-1, 1]. The index itself is unused — it only sizes the sample.
      val points: RDD[(Double, Double)] = rdd.map(_ => {
        val x: Double = Random.nextDouble() * 2 - 1
        val y: Double = Random.nextDouble() * 2 - 1
        (x, y)
      })

      // Count the points inside (or on) the unit circle: x^2 + y^2 <= 1.
      val hits: Long = points.filter { case (x, y) => x * x + y * y <= 1 }.count()

      // π ≈ 4 * (points in circle) / (total points).
      val pi: Double = 4.0 * hits / samples

      println(pi)
    } finally {
      // Release the Spark environment even if the job fails;
      // the original code leaked the context at exit.
      sc.stop()
    }
  }

}
