package com.offcn.spark.p4

import org.apache.spark.util.AccumulatorV2

import scala.collection.mutable

/**
 * Custom Spark accumulator that counts occurrences of each added word.
 *
 * @author BigData-LGW
 * @since 2020/12/8
 * @version 1.0
 */
case class MyAccumultor() extends AccumulatorV2[String,Map[String,Int]]{
    // Per-partition word -> count state; mutable locally, exposed immutably via `value`.
    private var map = mutable.Map[String,Int]()

    /**
     * True when no counts have been accumulated yet.
     *
     * BUG FIX: was hard-coded to `true`, violating the AccumulatorV2 contract —
     * `isZero` must reflect whether the accumulator actually holds data, and
     * `copyAndReset()` is required to produce a copy for which `isZero` is true.
     */
    override def isZero: Boolean = map.isEmpty

    /**
     * Returns an independent copy of this accumulator.
     *
     * BUG FIX: previously shared the same mutable map reference
     * (`newAccu.map = this.map`), so Spark's default `copyAndReset()`
     * (copy then reset) would clear this accumulator's data as well.
     * Cloning the map makes the copy truly independent.
     */
    override def copy(): AccumulatorV2[String, Map[String, Int]] = {
        val newAccu = new MyAccumultor
        newAccu.map = this.map.clone()
        newAccu
    }

    /** Resets this accumulator to the empty (zero) state. */
    override def reset(): Unit = this.map.clear()

    /** Increments the count for `word` by one (executor-side update). */
    override def add(word: String): Unit = {
        map.put(word, map.getOrElse(word, 0) + 1)
    }

    /** Folds another accumulator's counts into this one (driver-side merge). */
    override def merge(other: AccumulatorV2[String, Map[String, Int]]): Unit = {
        for ((word, count) <- other.value) {
            map.put(word, map.getOrElse(word, 0) + count)
        }
    }

    /** Immutable snapshot of the accumulated word counts. */
    override def value: Map[String, Int] = this.map.toMap
}
