package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo16PI {

  /**
   * Estimates PI with a Monte Carlo simulation on Spark.
   *
   * Samples points uniformly in the square [-1, 1] x [-1, 1] and counts how
   * many fall inside the unit circle. Since area(circle) / area(square)
   * = PI / 4, we have PI ≈ 4 * inside / total.
   */
  def main(args: Array[String]): Unit = {
    // Spark configuration: run locally using all available cores.
    val conf = new SparkConf()
    conf.setMaster("local[*]")
    // Fix: app name was "wc", a copy-paste leftover from a word-count demo.
    conf.setAppName("MonteCarloPI")
    // Create the Spark context.
    val sc = new SparkContext(conf)

    // Total number of sample points. Keeping it as a named constant lets us
    // use it directly as the denominator below instead of launching a second
    // Spark job (pointsRDD.count()) that would regenerate all 100M points.
    val sampleCount: Int = 100000000

    val rdd: RDD[Int] = sc.parallelize(1 to sampleCount)

    // Generate one random point per element, uniformly in [-1, 1] x [-1, 1].
    // The element value is unused; it only drives the number of samples.
    val pointsRDD: RDD[(Double, Double)] = rdd.map { _ =>
      val x: Double = Random.nextDouble() * 2 - 1
      val y: Double = Random.nextDouble() * 2 - 1
      (x, y)
    }

    // Keep only points inside (or on) the unit circle, and count them.
    // This is the single action that triggers the whole pipeline once.
    val insideCount: Long = pointsRDD
      .filter { case (x, y) => x * x + y * y <= 1 }
      .count()

    // PI ≈ 4 * (points inside circle) / (total points).
    val pi: Double = insideCount.toDouble / sampleCount * 4

    println(pi)

    // Release cluster/driver resources; the original leaked the context.
    sc.stop()
  }
}
