package com.offcn.bigdata.spark.p3

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Accumulator demo:
  *     shows how to do a side-channel count (total line count) with a Spark
  *     `LongAccumulator` while the main job filters for error lines, compared
  *     against the naive approach of running a second full pass over the RDD.
  *
  * Two caveats this demo illustrates:
  *   - Transformations are lazy: the accumulator is still 0 before the action runs.
  *   - Updating an accumulator inside a transformation (as opposed to an action)
  *     is only approximately reliable — if a task is retried or the RDD is
  *     recomputed, the same lines are added again and the count inflates.
  */
object _03AccumulatorOps {

    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
            .setMaster("local[*]")
            // Fixed: previously named the app after _02BroadcastOps (copy-paste
            // from the broadcast exercise); use this object's own class name.
            .setAppName(s"${_03AccumulatorOps.getClass.getSimpleName}")
        val sc = new SparkContext(conf)

        val line = sc.textFile("file:/E:/work/2020-0828期大数据/workspace/spark-parent-0828/data/error.log")

        /////////////////// Without an accumulator ////////////////////
        // Two separate actions → the input is scanned twice.
        val totalCount = line.count()
        val naiveErrorCount = line.filter(line => line == "error").count()
        println(s"totalCount: ${totalCount}")
        println(s"errorCount: ${naiveErrorCount}")
        println("----------------------------------------------")
        /////////////////// With an accumulator ////////////////////
        // One pass: count every line via the accumulator while filtering.
        val tcAccu = sc.longAccumulator("totalCount")
        val rdd = line.filter(line => {
            tcAccu.add(1L)
            line == "error"
        })

        // Still 0 here — filter() is lazy; nothing has executed yet.
        println(s"action 前 totalCount: ${tcAccu.value}")
        val errorCount = rdd.count()
        // Populated now that the count() action has run the filter.
        println(s"action 后 totalCount: ${tcAccu.value}")
        println(s"errorCount: ${errorCount}")
        sc.stop()
    }
}
