package com.haozhen.rdd

/**
  * @author haozhen
  * @email haozh@ync1.com
  * @date 2021/1/30  19:13
  */
/**
  * Demonstrates Spark accumulators.
  *
  * Key point: RDD transformations are lazy — accumulator updates placed
  * inside a `map` only happen when an action is run on the *mapped* RDD.
  */
object AccumulateDemo {

  def main(args: Array[String]): Unit = {
    import org.apache.spark.util.LongAccumulator
    import org.apache.spark.{SparkConf, SparkContext}
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName(this.getClass.getCanonicalName.init)

    val context = new SparkContext(conf)

    // Accumulators are registered on the driver; tasks may only `add`,
    // and only the driver's merged `value` is meaningful.
    val acc1: LongAccumulator = context.longAccumulator("totalNum1")
    val acc2 = context.doubleAccumulator("totalNum2")
    val acc3 = context.collectionAccumulator[Int]("totalNum3")

    val rdd1 = context.parallelize(1 to 10)

    // BUG FIX: the original discarded the result of `rdd1.map{...}` and then
    // ran `rdd1.count()` on the un-mapped RDD — since `map` is lazy, the
    // closure (and its `acc1.add`) never executed and acc1.value printed 0.
    // Keep the mapped RDD and run the action on it instead. The executor-side
    // `println(acc1.value)` was also removed: tasks must not read accumulator
    // values (they only see an unmerged local partial sum).
    val rdd2 = rdd1.map { x =>
      acc1.add(x)
      x
    }

    // Run the action exactly once — every additional action on rdd2 would
    // re-run the map closure and double-count acc1.
    rdd2.count()

    // count() blocks until the job completes and accumulator updates are
    // merged on the driver, so no Thread.sleep is needed before reading.
    println(acc1.value) // 55 == (1 to 10).sum

    context.stop()
  }
}
