package com.doit.day04

import org.apache.spark.util.AccumulatorV2
import  scala.collection._

/**
 * @Author: Hang.Nian.YY
 * @WX: 17710299606
 * @Tips: 学大数据 ,到多易教育
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description:
 */
/**
 * A Spark [[AccumulatorV2]] that counts occurrences of each string added to it,
 * producing a word → count map as its value.
 */
class MyAccumulator extends AccumulatorV2[String, Map[String, Int]] {

  // Per-partition (and, after merge, global) word counts accumulated so far.
  val res = mutable.Map[String, Int]()

  /** The accumulator is "zero" when no counts have been recorded yet. */
  override def isZero: Boolean = res.isEmpty

  /**
   * Creates a new accumulator holding a copy of the current counts.
   * The AccumulatorV2 contract requires `copy()` to preserve the current
   * state; returning an empty accumulator would silently drop any counts
   * accumulated so far.
   */
  override def copy(): AccumulatorV2[String, Map[String, Int]] = {
    val acc = new MyAccumulator
    acc.res ++= res
    acc
  }

  /** Resets this accumulator back to the zero (empty) state. */
  override def reset(): Unit = res.clear()

  /**
   * Partition-local accumulation: increments the count for word `v`.
   *
   * @param v the word to count
   */
  override def add(v: String): Unit =
    res.update(v, res.getOrElse(v, 0) + 1)

  /**
   * Merges another accumulator's counts into this one (runs on the driver).
   * Uses `foreach` rather than `map` since only the side effect is needed —
   * `map` would allocate a result collection that is immediately discarded.
   *
   * @param other the partition-local accumulator whose counts are folded in
   */
  override def merge(other: AccumulatorV2[String, Map[String, Int]]): Unit =
    other.value.foreach { case (word, cnt) =>
      res.update(word, res.getOrElse(word, 0) + cnt)
    }

  /** The accumulated word → count map. */
  override def value: Map[String, Int] = res
}
