package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object Demo17Pi {
  /**
   * Monte Carlo estimation of Pi with Spark.
   *
   * Driver-side demos first (random points, Range variants), then a Spark job
   * that samples points uniformly in the square [-1,1) x [-1,1) and counts how
   * many fall inside the unit circle: Pi ≈ 4 * inside / total.
   */
  def main(args: Array[String]): Unit = {

    // Driver-side demo: print `total` random points in the square (-1,1) x (-1,1).
    val total: Int = 100
    for (_ <- 1 to total) {
      // Random.nextDouble() is in [0,1); scaling by 2 and shifting by -1 maps it to [-1,1).
      val x: Double = Random.nextDouble() * 2 - 1
      val y: Double = Random.nextDouble() * 2 - 1
      println((x, y))
    }

    // Range demos: `to` is inclusive, `until` / `Range` exclude the upper bound.
    // [0,10]
    for (i <- 0 to 10) {
      println(i)
    }
    // [0,10)
    for (i <- 0 until 10) {
      println(i)
    }
    // [0,10)
    for (i <- Range(0, 10, 1)) {
      println(i)
    }

    // Create an RDD from a local (iterable) sequence.
    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo17Pi")
    conf.setMaster("local[*]")

    val sc: SparkContext = new SparkContext(conf)

    val pointCnt: Int = 10000000 // points generated per map task
    val mapTaskNums: Int = 10    // number of partitions (map tasks)

    // pointCnt * mapTaskNums points in total, uniformly sampled in [-1,1) x [-1,1).
    val pointRDD: RDD[(Double, Double)] = sc
      .parallelize(1 to mapTaskNums * pointCnt, mapTaskNums)
      .map { _ =>
        val x: Double = Random.nextDouble() * 2 - 1
        val y: Double = Random.nextDouble() * 2 - 1
        (x, y)
      }

    // Tag each point as inside/outside the unit circle, then count per tag.
    // cache() because this RDD is consumed twice below (foreach + filter/collect);
    // without it, all points would be regenerated for the second action.
    val pointCntRDD: RDD[(String, Int)] = pointRDD
      .map {
        case (x, y) =>
          if (x * x + y * y <= 1) {
            ("圆内", 1)
          } else {
            ("圆外", 1)
          }
      }
      .coalesce(2)
      .reduceByKey(_ + _)
      .cache()

    pointCntRDD.foreach(println)

    // Safely extract the inside-circle count; fall back to 0 instead of
    // crashing with an out-of-bounds access on an (unlikely) empty result.
    val pointInCycleCnt: Int = pointCntRDD
      .filter(kv => "圆内".equals(kv._1))
      .map(_._2)
      .collect()
      .headOption
      .getOrElse(0)

    println(s"圆内的点的数量：$pointInCycleCnt")

    // Monte Carlo estimate: Pi ≈ 4 * inside / total.
    // Widen the denominator to Long so larger pointCnt/mapTaskNums settings
    // cannot overflow Int (10 * 10M already sits close to Int.MaxValue).
    val Pi: Double = pointInCycleCnt.toDouble * 4 / (mapTaskNums.toLong * pointCnt)
    println(s"Pi的值为: $Pi")

    // Release cluster resources explicitly.
    sc.stop()
  }
}
