/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// scalastyle:off println
package edu.csl.study.spark.basic

import org.apache.spark.sql.SparkSession

import scala.math.random

/**
 * Computes an approximation to pi via Monte Carlo sampling: throw random
 * points into the unit square and count the fraction that land inside the
 * unit circle; that fraction approaches pi/4.
 *
 * Usage: SparkPi [slices]
 *   slices — optional number of RDD partitions (defaults to 2).
 */
object SparkPi {
    def main(args: Array[String]): Unit = {

        // Programming entry point: a local SparkSession running on 4 threads.
        val spark: SparkSession = SparkSession.builder.appName("Spark Pi").master("local[4]").getOrCreate()

        // Degree of parallelism, optionally taken from the first CLI argument.
        val slices = if (args.length > 0) args(0).toInt else 2
        // Total number of samples; the 100000L * slices product is computed in
        // Long and capped at Int.MaxValue to avoid Int overflow.
        val n = math.min(100000L * slices, Int.MaxValue).toInt

        // Parallelize [1, n) into `slices` partitions and transform it.
        // `count` lives on the driver: `reduce` is an action, so it triggers
        // job execution (internally sc.runJob) and ships the result back.
        val count: Long = spark.sparkContext.parallelize(1 until n, slices).map { i =>
            // Sample a point uniformly in the square [-1, 1] x [-1, 1].
            val x = random * 2 - 1
            val y = random * 2 - 1
            // Debug trace showing which executor thread handled this sample.
            println(Thread.currentThread().getName + "-map处理：" + x)
            // 1 if the point falls inside the unit circle, else 0.
            if (x * x + y * y <= 1) 1 else 0
        }.reduce((x, y) => {
            // Sum the per-sample hits; equivalent to the anonymous function _ + _.
            println(Thread.currentThread().getName + "-reduce处理：" + x)
            x + y
        })

        // hits / samples ≈ pi / 4, so scale by 4 to estimate pi.
        println(s"Pi is roughly ${4.0 * count / (n - 1)}")

        // Release Spark resources.
        spark.stop()
    }
}

// scalastyle:on println
