package org.hadoop.spark
import org.apache.spark.util.{AccumulatorV2, LongAccumulator}
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Minimal demo of Spark's built-in [[LongAccumulator]]: a driver-visible
 * counter that executors increment while processing an RDD.
 */
object Accumulator {
  def main(args: Array[String]): Unit = {
    // Local two-thread Spark context, just for this demo.
    val conf: SparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("Accumulator")
    val sc: SparkContext = new SparkContext(conf)
    sc.setLogLevel("WARN")

    try {
      // Declare a named long accumulator (shows up in the Spark UI as "A").
      val acc: LongAccumulator = sc.longAccumulator("A")
      val rdd = sc.parallelize(1 to 10)

      // NOTE: map is a lazy transformation — the accumulator is only
      // updated once an action (collect below) actually runs the job.
      // Also, updates made inside transformations are not exactly-once:
      // a retried task re-applies them. For guaranteed-once semantics,
      // update accumulators inside actions (e.g. foreach) instead.
      val rdd2 = rdd.map { i =>
        acc.add(1) // bump the accumulator once per element
        i + 1
      }

      println(rdd2.collect().toSet)
      // Use .value (the accumulated sum) rather than .count (the number of
      // add() calls); they coincide here only because each add is 1.
      println("输出结果：" + acc.value) // accumulated value = 10
    } finally {
      // Always release the SparkContext, even if an action above throws.
      sc.stop()
    }
  }
}
