package com.zhaosc.spark.core.broadcast

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.util.AccumulatorMetadata
import org.apache.spark.util.LongAccumulator

/**
 * Demonstrates a Spark [[org.apache.spark.util.LongAccumulator]].
 *
 * The accumulator is defined and registered on the driver; executors can only
 * add to it (a write-only view), and the merged value is readable back on the
 * driver after an action has run.
 *
 * @author root
 */
object AccumulatorOperator {

  /**
   * Entry point: counts the lines of a text file by incrementing an
   * accumulator once per line, then prints the total on the driver.
   *
   * @param args optional; `args(0)` overrides the default input file path
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("accumulator")

    val sc = new SparkContext(conf)

    // Register the accumulator so Spark can track and merge executor-side updates.
    val accumulator = new LongAccumulator()
    sc.register(accumulator, "accumulator")

    // Input path is now configurable; falls back to the original hard-coded file.
    val inputPath = args.headOption.getOrElse(
      "D:\\zhaoshichao\\workspace\\spark-study\\src\\main\\resources\\data.txt")
    val lineRdd = sc.textFile(inputPath)

    // BUG FIX: the original used map(...).collect(), which shipped an Array[Unit]
    // back to the driver just to trigger the side effect. foreach is the idiomatic
    // action for side-effect-only traversal and transfers nothing to the driver.
    lineRdd.foreach(_ => accumulator.add(1))

    // Reading the accumulator is only valid here on the driver, after the action.
    println(accumulator.value)

    sc.stop()
  }

}