package cn.edu.spark.core

import org.apache.spark.sql.SparkSession

import java.lang.Thread.sleep

/**
 * Micro-benchmark that repeatedly broadcasts a large array and times how long
 * each broadcast-and-read round trip takes. Run locally via `main`.
 */
object BroadcastTest {
  def main(args: Array[String]): Unit = {
    // Small broadcast block size (bytes) forces the broadcast payload to be
    // split into many blocks even though the array is modest in size.
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("Broadcast Test")
      .config("spark.broadcast.blockSize", 4096)
      .getOrCreate()

    val sc = spark.sparkContext

    val slices = 2    // partitions of the probe RDD
    val num = 1000000 // element count of the broadcast array

    val arr1 = (0 until num).toArray

    for (i <- 0 until 3) {
      println(s"Iteration $i")
      println("===========")
      val startTime = System.nanoTime
      // Mutate one element so each iteration broadcasts distinct data
      // (prevents caching from masking the cost of re-broadcasting).
      arr1(1) += 1
      val barr1 = sc.broadcast(arr1)
      // Each task reads element 1 of the broadcast array; collecting and
      // printing verifies every task observed the freshly mutated value.
      val observedValues = sc.parallelize(1 to 10, slices).map(_ => barr1.value(1))
      observedValues.collect().foreach(v => println(v))
      println("Iteration %d took %.0f milliseconds".format(i, (System.nanoTime - startTime) / 1E6))
      // Release the broadcast's blocks on driver and executors. Without this,
      // every iteration leaks the full 1M-int array in block storage.
      barr1.destroy()
    }

    // Keep the application alive for 5 minutes so the Spark UI at
    // http://localhost:4040 can be inspected before shutdown.
    sleep(300000)
    spark.stop()
  }
}
