package com.shujia.core

import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.slf4j.Logger

import scala.util.{Random, Try}

object Demo26SparkPI extends Logging {
  val logger: Logger = log

  /**
   * Estimates PI with the Monte Carlo method on Spark.
   *
   * Random points are drawn uniformly from the square [-1, 1] x [-1, 1];
   * the fraction falling inside the unit circle approaches PI / 4
   * (ratio of the circle's area PI*r^2 to the square's area 4*r^2).
   *
   * @param args args(0) = number of map tasks (partitions); each task
   *             generates `pointNum` points.
   */
  def main(args: Array[String]): Unit = {
    // Random.nextDouble() yields values in (0, 1); scaling by 2 and
    // shifting by -1 maps them into (-1, 1).
    logger.info(Random.nextDouble() + "," + Random.nextDouble())
    logger.info(s"${Random.nextDouble() * 2 - 1},${Random.nextDouble() * 2 - 1}")

    // Points generated per map task.
    val pointNum: Int = 10000000
    if (args.isEmpty) {
      logger.error("请指定MapTask的数量！")
      return
    }
    // Parse defensively: a non-numeric or non-positive argument would
    // otherwise crash with an uncaught NumberFormatException.
    val mapTaskNum: Int = Try(args.head.toInt).toOption match {
      case Some(n) if n > 0 => n
      case _ =>
        logger.error(s"MapTask数量必须是正整数：${args.head}")
        return
    }

    logger.info(s"生成${mapTaskNum}个MapTask")

    val conf: SparkConf = new SparkConf()
    conf.setAppName("Demo26SparkPI")
    conf.setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    try {
      // Use Long arithmetic: pointNum * mapTaskNum overflows Int for
      // mapTaskNum >= 215 (10,000,000 * 215 > Int.MaxValue).
      val totalPoints: Long = pointNum.toLong * mapTaskNum

      // sc.range is Long-based, so the RDD can hold more than
      // Int.MaxValue elements (an Int Range like `1 to n` cannot).
      val pointRDD: RDD[(Double, Double)] = sc
        .range(1L, totalPoints + 1, numSlices = mapTaskNum)
        .map(_ => (Random.nextDouble() * 2 - 1, Random.nextDouble() * 2 - 1))

      println(s"pointRDD的分区数为:${pointRDD.getNumPartitions}")

      // A point (x, y) lies inside the unit circle iff x*x + y*y <= 1.
      // count() returns Long, avoiding the Int overflow of summing 1s
      // with reduce.
      val inCirclePointNum: Long = pointRDD
        .filter { case (x, y) => x * x + y * y <= 1 }
        .count()

      // PI ≈ 4 * (points inside circle) / (total points).
      val PI: Double = 4.0 * inCirclePointNum / totalPoints
      println(s"PI值为：$PI")
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }

}
