package com.doit.spark.day05

import org.apache.spark.util.AccumulatorV2

import scala.collection.mutable
import scala.collection.mutable.HashMap

/**
 * Custom Spark accumulator for word counting: each `add`ed String is a word,
 * and the accumulated value is a word -> occurrence-count HashMap.
 *
 * @DATE 2022/1/8/11:27
 * @Author MDK
 * @Version 2021.2.2
 * */
class MyAccumalator extends AccumulatorV2[String, HashMap[String, Int]]{

  // Internal state: word -> occurrence count held by this accumulator instance.
  private val mp = new HashMap[String, Int]

  /** An accumulator is "zero" when it holds no counts yet. */
  override def isZero: Boolean = mp.isEmpty

  /**
   * Creates a new accumulator holding the SAME current counts.
   *
   * Fix: the previous implementation returned an empty accumulator, which
   * violates the AccumulatorV2 contract — copy() must preserve the current
   * value (the default copyAndReset() calls copy() before reset(), so an
   * empty copy silently drops partial per-task results).
   */
  override def copy(): AccumulatorV2[String, mutable.HashMap[String, Int]] = {
    val newAcc = new MyAccumalator
    newAcc.mp ++= mp
    newAcc
  }

  /** Clears all accumulated counts, returning this accumulator to the zero state. */
  override def reset(): Unit = mp.clear()

  /** Records one occurrence of `word` in the internal count map. */
  override def add(word: String): Unit = {
    val cnt: Int = mp.getOrElse(word, 0) + 1
    mp.update(word, cnt)
  }

  /**
   * Merges the counts of another accumulator of the same type into this one.
   * Spark calls this on the driver to combine per-task partial results.
   */
  override def merge(other: AccumulatorV2[String, mutable.HashMap[String, Int]]): Unit = {
    // foreach (not map): we iterate purely for the side effect of updating mp,
    // so no transformed collection should be built and discarded.
    other.value.foreach { case (word, cnt) =>
      mp.update(word, mp.getOrElse(word, 0) + cnt)
    }
  }

  /** The current word -> count map (note: exposes the mutable internal map). */
  override def value: mutable.HashMap[String, Int] = mp
}
