package com.shujia.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.util.Random

object CountPi {
  /**
   * Estimates Pi with a Monte Carlo simulation on Spark: sample random points
   * uniformly in the square [-1, 1] x [-1, 1] and measure the fraction that
   * falls inside the unit circle. That fraction approximates Pi / 4.
   *
   * @param args optional first argument: number of sample points
   *             (defaults to 1000000, matching the original behavior)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()

    conf.setAppName("计算圆周率")
//    conf.setMaster("local")

    val sc = new SparkContext(conf)

    // Sample size is now configurable; no argument keeps the original 1000000.
    val sampleCount: Int = if (args.nonEmpty) args(0).toInt else 1000000

    val numRDD: RDD[Int] = sc.parallelize(1 to sampleCount)

    // map replaces a for loop: each input element yields one random (x, y)
    // coordinate uniformly distributed in [-1, 1] x [-1, 1].
    val allZuoBiaoRDD: RDD[(Double, Double)] = numRDD.map { _ =>
      val x: Double = Random.nextDouble() * 2 - 1
      val y: Double = Random.nextDouble() * 2 - 1
      (x, y)
    }

    // Keep only points inside the unit circle. Comparing the squared distance
    // (x^2 + y^2 <= 1) is equivalent to sqrt(x^2 + y^2) <= 1 since both sides
    // are non-negative, and skips a sqrt call per point.
    val yuanZuoBiaoRDD: RDD[(Double, Double)] = allZuoBiaoRDD.filter { case (x, y) =>
      x * x + y * y <= 1
    }

    // BUG FIX: the original called collect().size on both RDDs, pulling every
    // sampled coordinate into the driver just to count it (driver OOM risk on
    // large samples). count() aggregates on the executors instead, and the
    // total is already known (sampleCount), so only one Spark job is needed.
    val insideCount: Long = yuanZuoBiaoRDD.count()

    println("=" * 50)
    println(s"圆周率的计算结果为：${(insideCount.toDouble / sampleCount.toDouble) * 4}")
    println("=" * 50)

    // Release cluster resources; the original leaked the SparkContext.
    sc.stop()
  }
}

/**
 * spark作业提交：
 *  standalone:
 *    client:
 *      spark-submit --class com.shujia.core.CountPi --master spark://master:7077 --executor-memory 512m --total-executor-cores 1 ../jars/spark-1.0-SNAPSHOT.jar
 *    cluster:
 *      spark-submit --class com.shujia.core.CountPi --master spark://master:7077 --executor-memory 512M --total-executor-cores 1 --deploy-mode cluster spark-1.0-SNAPSHOT.jar
 *
 *  yarn:
 *    client:
 *      spark-submit --class com.shujia.core.CountPi --master yarn --deploy-mode client --executor-memory 512m --executor-cores 1 spark-1.0-SNAPSHOT.jar
 *    cluster:
 *      spark-submit --class com.shujia.core.CountPi --master yarn --deploy-mode cluster spark-1.0-SNAPSHOT.jar
 *
 *    查看yarn-cluster模式下的日志：yarn logs -applicationId xxx
 */
