package com.shujia.spark

import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo13SparkPi {

  /**
   * Estimates Pi with a Monte Carlo simulation on Spark.
   *
   * Throws ~100 million random points at the square [-1, 1] x [-1, 1];
   * the fraction landing inside the unit circle approximates Pi / 4.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    // Sample indices: one random point is generated per element.
    val list = 0 to 100000000
    println(list)

    val conf = new SparkConf()
      .setAppName("Demo13SparkPi")
      // Same effect as spark-submit's --master flag, but a value set in
      // code takes priority over the one passed to spark-submit.
      //.setMaster("local[8]")

    val sc = new SparkContext(conf)

    // Ensure the SparkContext is stopped even if the job throws,
    // so executors and cluster resources are released.
    try {
      // Distribute the samples across 100 partitions.
      val rdd = sc.parallelize(list, 100)

      val inCircle = rdd
        .map { _ =>
          // Random point (x, y) uniformly distributed in [-1, 1] x [-1, 1].
          val x = Random.nextFloat() * 2 - 1
          val y = Random.nextFloat() * 2 - 1
          (x, y)
        }
        .filter { case (x, y) =>
          // Keep only points inside (or on) the unit circle.
          x * x + y * y <= 1
        }

      // Area ratio: (points in circle / total points) ~= Pi / 4.
      val pi = (inCircle.count() / list.length.toDouble) * 4

      println("pi:" + pi)
    } finally {
      sc.stop()
    }
  }
}
