package com.shujia.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.util.LongAccumulator
import org.apache.spark.{SparkConf, SparkContext}

object Demo23Acc {

  /**
    * Demonstrates why a plain driver-side variable cannot aggregate values
    * inside an RDD action, and how a Spark [[LongAccumulator]] solves it.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Idiomatic fluent configuration instead of separate setter statements.
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("acc")

    val sc = new SparkContext(conf)

    try {
      val studentRDD: RDD[String] = sc.textFile("data/students.txt", 2)

      // Code outside the operator runs on the Driver.
      var count = 0

      studentRDD.foreach(stu => {
        // Code inside the operator runs on the Executors.
        // Each task receives its own serialized copy of `count`, so mutating
        // it here never propagates back to the Driver-side variable.
        count += 1
        println(count)
      })

      // Still 0 on the Driver: the executor-side increments are lost.
      println(count)

      /**
        * Accumulator: the correct mechanism for aggregating across executors.
        */

      // 1. Define the accumulator on the Driver.
      val countAcc: LongAccumulator = sc.longAccumulator

      studentRDD.foreach(stu => {
        // 2. Accumulate inside the operator (runs on the Executors).
        countAcc.add(1)
      })

      // 3. Read the merged result back on the Driver.
      println(countAcc.value)
    } finally {
      // Release cluster resources even if the job throws — the original
      // code never stopped the SparkContext.
      sc.stop()
    }
  }

}
