package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.util.LongAccumulator
import org.apache.spark.{SparkConf, SparkContext}

object Demo21Accumulator {

  /**
   * Demonstrates two ways of counting elements inside an RDD operator:
   *
   *  1. Incrementing a plain driver-side `var` — which does NOT work, because
   *     the closure runs on Executors and only captures a copy of the variable.
   *  2. Using a Spark [[LongAccumulator]] — executor-side partial sums are
   *     merged back to the driver, where the result can be read.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    conf.setAppName("wc")

    val sc = new SparkContext(conf)

    try {
      val studentsRDD: RDD[String] = sc.textFile("data/students.txt")

      // Accumulating into a plain variable inside an operator does not
      // propagate back: operator code runs on Executors, driver code runs on
      // the Driver, so the closure only sees a per-task copy of `count`.
      var count = 0
      studentsRDD.foreach(stu => {
        count += 1
        // Prints the executor-side copy; the driver's `count` stays 0.
        println(count)
      })
      println(s"count:$count")

      /**
       * Accumulator
       */
      // 1. Define the accumulator on the driver.
      val accumulator: LongAccumulator = sc.longAccumulator

      val mapRDD: RDD[String] = studentsRDD.map(stu => {
        // 2. Add to the accumulator inside the operator (runs on executors).
        // NOTE: `map` is lazy — the accumulator is only updated once an
        // action triggers the job; re-running actions on this lineage would
        // add the values again.
        accumulator.add(1)
        stu
      })

      mapRDD.foreach(println)

      // 3. Read the merged result on the Driver, after the action completed.
      println(s"accumulator:${accumulator.value}")
    } finally {
      // Original code never stopped the context; release Spark resources.
      sc.stop()
    }
  }

}
