package com.leal.client

import com.leal.util.DateUtil
import org.apache.spark.sql.SparkSession
import scala.math.random

import scala.concurrent.{Await, ExecutionContext, Future}
import ExecutionContext.Implicits.global
import scala.collection.mutable.ArrayBuffer

/**
 * Runs several Spark Monte Carlo Pi-estimation jobs concurrently using Futures.
 *
 * @projectName com.leal.client
 * @description: submits ten independent Pi tasks and prints each result line
 * @author leal123
 * @date 2023/3/30 21:56
 */
object MultiExample {

  /**
   * Builds a Future that estimates Pi with a Monte Carlo Spark job.
   *
   * The Future starts executing immediately on the global ExecutionContext;
   * the Spark job itself runs on the session's cluster/local threads.
   *
   * @param spark  the active SparkSession used to submit the job
   * @param slices number of RDD partitions; also scales the sample count (100000 * slices)
   * @return a Future completing with a timestamped, human-readable result line
   */
  private def getTaskById(spark: SparkSession, slices: Int): Future[String] = {
    Future {
      val n = 100000 * slices
      // Sample n random points in the unit square [-1, 1) x [-1, 1);
      // the fraction landing inside the unit circle approximates Pi / 4.
      val count = spark.sparkContext.parallelize(1 to n, slices)
        .map { i =>
          val x = random * 2 - 1
          val y = random * 2 - 1
          if (x * x + y * y <= 1) 1 else 0
        }
        .reduce(_ + _)
      val pi = 4.0 * count / n

      DateUtil.getCurrentTime() + s" ==>Task $slices completed. Pi is roughly $pi"
    }
  }

  def main(args: Array[String]): Unit = {
    println("任务开始运行： " + DateUtil.getCurrentTime())
    val spark = SparkSession.builder()
      .appName("Pi")
      .master("local[*]")
      .getOrCreate()
    try {
      // Create all Futures eagerly so the ten tasks run concurrently,
      // then gather them into a single Future of results.
      val tasks = (1 to 10).map(i => getTaskById(spark, i * 100)).toList
      val results = Future.sequence(tasks)

      // Blocking is acceptable here only because this is the program's edge.
      val output = Await.result(results, scala.concurrent.duration.Duration.Inf)

      output.foreach(println)
    } finally {
      // Always release Spark resources, even if a task fails.
      spark.stop()
    }
    println("任务结束运行： " + DateUtil.getCurrentTime())
  }
}
