package com.atguigu.bigdata.spark.core.acc

import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
  * create by undeRdoG on  2021-06-14  17:46
  * 凡心所向，素履以往，生如逆旅，一苇以航。
  */
object Spark04_ACC {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("RDD").setMaster("local[*]")
    val sc = new SparkContext(sparkConf)

    try {
      val rdd = sc.makeRDD(List("hello world", "hello spark"))

      // Create the custom word-count accumulator and register it with the
      // driver so Spark can merge the per-task partial results.
      val wcACC = new MyAccumulator()
      sc.register(wcACC, "wordCountAcc")

      // BUG FIX: the original added whole lines ("hello world") to the
      // accumulator, so it counted lines rather than words. Split each line
      // into words first, matching the "wordCountAcc" intent.
      rdd.flatMap(_.split(" ")).foreach(word => wcACC.add(word))

      // Reading .value on the driver after an action is the supported pattern.
      println(wcACC.value)
    } finally {
      // BUG FIX: the SparkContext was never stopped; release its resources
      // even if the job throws.
      sc.stop()
    }
  }


  /**
    * Custom accumulator that counts word occurrences.
    *
    * IN  : String — a single word, added on the executors via [[add]]
    * OUT : mutable.Map[String, Long] — word -> occurrence count
    */
  class MyAccumulator extends AccumulatorV2[String, mutable.Map[String, Long]] {

    // Accumulation buffer: per-task copies on the executors, merged result
    // on the driver.
    private var wcMap = mutable.Map[String, Long]()

    /** True while nothing has been accumulated yet. Spark uses this to
      * validate that copyAndReset() returned a zero-value accumulator. */
    override def isZero: Boolean = wcMap.isEmpty

    /**
      * Creates a copy of this accumulator carrying its current value.
      *
      * BUG FIX: the AccumulatorV2 contract requires copy() to duplicate the
      * current state; the previous implementation returned an empty
      * accumulator, silently dropping any already-accumulated counts.
      */
    override def copy(): AccumulatorV2[String, mutable.Map[String, Long]] = {
      val newAcc = new MyAccumulator()
      // clone() is sufficient here: keys and values are immutable.
      newAcc.wcMap = wcMap.clone()
      newAcc
    }

    /** Clears all accumulated counts (called by copyAndReset() after copy()). */
    override def reset(): Unit = wcMap.clear()

    /** Adds one occurrence of `word`; runs on the executors. */
    override def add(word: String): Unit = {
      wcMap.update(word, wcMap.getOrElse(word, 0L) + 1L)
    }

    /**
      * Merges the partial result of `other` into this accumulator.
      * Called by Spark on the driver when task results come back.
      */
    override def merge(other: AccumulatorV2[String, mutable.Map[String, Long]]): Unit = {
      other.value.foreach { case (word, count) =>
        wcMap.update(word, wcMap.getOrElse(word, 0L) + count)
      }
    }

    /** Current word -> count map. NOTE: this exposes the internal mutable
      * map directly; callers must treat it as read-only. */
    override def value: mutable.Map[String, Long] = wcMap
  }

}
