package com.shujia.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.immutable
import scala.util.Random

object Demo15PI {

  /**
   * Estimates PI with the Monte Carlo method: scatter random points across
   * the square [-1, 1) x [-1, 1) and count how many land inside the unit
   * circle. The ratio (inside / total) approximates PI / 4, so
   * PI ~= 4 * inside / total.
   */
  def main(args: Array[String]): Unit = {

    // Quickly build a large collection: one element per random sample.
    // Note: `0 to 10000000` contains 10,000,001 elements, which is why the
    // final ratio divides by `list.length` rather than a hard-coded count.
    val list: immutable.Seq[Int] = 0 to 10000000

    val conf: SparkConf = new SparkConf()
      .setMaster("local")
      .setAppName("map")

    val sc = new SparkContext(conf)

    try {
      // Turn the Scala collection into an RDD so the sampling is distributed.
      val dataRDD: RDD[Int] = sc.parallelize(list)

      // Generate one random point (x, y) per element; each coordinate is
      // uniform in [-1, 1) since nextDouble() is uniform in [0, 1).
      val pointRDD: RDD[(Double, Double)] = dataRDD.map { _ =>
        val x: Double = Random.nextDouble() * 2 - 1
        val y: Double = Random.nextDouble() * 2 - 1
        (x, y)
      }

      // Keep only the points that fall inside (or on) the unit circle.
      val filterRDD: RDD[(Double, Double)] = pointRDD.filter {
        case (x, y) => x * x + y * y <= 1
      }

      // PI ~= 4 * (points inside circle / total points).
      val pi: Double = filterRDD.count().toDouble / list.length * 4

      println("pi = " + pi)
    } finally {
      // Always release the SparkContext, even if the job fails —
      // the original code leaked it.
      sc.stop()
    }
  }

}
