package cn.edu.spark.core


import org.apache.spark.sql.SparkSession

import java.lang.Thread.sleep

/**
  * Small driver program that exercises named Spark accumulators so their values
  * can be observed through the Spark metrics system / web UI.
  *
  * It increments one long and one double accumulator once per element of a
  * 1,000,000-element RDD, prints the elapsed time and final accumulator values,
  * then keeps the application alive for five minutes so the metrics endpoints
  * can be inspected before the session is stopped.
  */
object AccumulatorMetricsTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local")
      // Uncomment to route metrics (including named accumulators) to stdout:
//      .config("spark.metrics.conf.*.sink.console.class", "org.apache.spark.metrics.sink.ConsoleSink")
      .getOrCreate()

    val sc = spark.sparkContext

    // Named accumulators are registered with the metrics system under these names.
    val acc = sc.longAccumulator("my-long-metric")
    val acc2 = sc.doubleAccumulator("my-double-metric")

    val startTime = System.nanoTime

    // `foreach` is an action returning Unit, so its result is not bound to a val
    // (the original assigned it to an unused `accumulatorTest`).
    sc.parallelize(1 to 1000000)
      .foreach { _ =>
        acc.add(1)
        acc2.add(1.1)
      }

    // Print a footer with test time and accumulator values
    println("Test took %.0f milliseconds".format((System.nanoTime - startTime) / 1E6))
    println("Accumulator values:")
    println(s"*** Long accumulator (my-long-metric): ${acc.value}")
    println(s"*** Double accumulator (my-double-metric): ${acc2.value}")

    // Keep the JVM alive for 5 minutes so the metrics / web UI can be inspected
    // before shutdown; adjust or remove for non-interactive runs.
    sleep(300000)
    spark.stop()
  }
}
