
package cn.edu.spark.core

import org.apache.spark.sql.SparkSession

import java.lang.Thread.sleep
import java.util.Random


object GroupByTest {
  /** Synthetic shuffle benchmark: each mapper generates random
   *  `(Int, Array[Byte])` pairs, the RDD is cached and materialized, and then
   *  a `groupByKey` over `numReducers` partitions is timed/observed.
   *
   *  Optional CLI arguments (each defaults to the previous hard-coded value,
   *  so invocations with no args behave exactly as before):
   *    args(0) = numMappers  (default: sparkContext.defaultParallelism)
   *    args(1) = numKVPairs  (default: 1000)   pairs generated per mapper
   *    args(2) = valSize     (default: 1000)   payload bytes per pair
   *    args(3) = numReducers (default: numMappers)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .master("local[*]")
      .appName("GroupBy Test")
      .getOrCreate()

    // Parse args(i) when present; otherwise keep the original default.
    def argOrElse(i: Int, default: => Int): Int =
      if (args.length > i) args(i).toInt else default

    val numMappers = argOrElse(0, spark.sparkContext.defaultParallelism)
    val numKVPairs = argOrElse(1, 1000)
    val valSize = argOrElse(2, 1000)
    val numReducers = argOrElse(3, numMappers)

    val pairs1 = spark
      .sparkContext
      .parallelize(0 until numMappers, numMappers)
      .flatMap {
        _ =>
          // Fresh RNG per task so each mapper produces independent data.
          val ranGen = new Random
          Array.tabulate[(Int, Array[Byte])](numKVPairs) { _ =>
            val byteArr = new Array[Byte](valSize)
            ranGen.nextBytes(byteArr)
            (ranGen.nextInt(), byteArr)
          }
      }
      .cache()
    // Force evaluation now so the groupByKey below reads from cache
    // instead of recomputing the random data.
    println(pairs1.count())

    println(pairs1.groupByKey(numReducers).count())

    // Keep the driver alive for 5 minutes so the Spark UI can be inspected
    // after the job completes.
    sleep(300000)
    spark.stop()
  }
}

