package day06

import java.util

import org.apache.spark.util.AccumulatorV2

import scala.collection.mutable

class MyWCAcc extends AccumulatorV2[String, mutable.HashMap[String, Int]] {
  // Internal state: word -> occurrence count.
  private val hasAcc = mutable.HashMap[String, Int]()

  // The accumulator is "zero" when no words have been counted yet.
  override def isZero: Boolean = hasAcc.isEmpty

  // Copy of this accumulator; Spark calls this when shipping it to tasks.
  // Multiple tasks may be writing while the copy is taken, so snapshot
  // under the map's monitor to avoid a concurrent-modification race.
  override def copy(): AccumulatorV2[String, mutable.HashMap[String, Int]] = {
    val newAcc = new MyWCAcc
    hasAcc.synchronized {
      newAcc.hasAcc ++= hasAcc
    }
    newAcc
  }

  // Reset back to the zero state.
  override def reset(): Unit = hasAcc.clear()

  // Task-local accumulation: increment the count for word v.
  override def add(v: String): Unit =
    hasAcc += v -> (hasAcc.getOrElse(v, 0) + 1)

  // Driver-side merge of another task's partial counts into this one.
  // BUG FIX: the original wrote `v + v` when the key already existed,
  // doubling the incoming count and discarding the existing one; the
  // correct merge is existing + incoming.
  // The previous type-erased `case o: AccumulatorV2[...]` match was
  // unchecked (erasure) and non-exhaustive; reading other.value directly
  // is sufficient and safe.
  override def merge(other: AccumulatorV2[String, mutable.HashMap[String, Int]]): Unit =
    for ((k, v) <- other.value)
      hasAcc += k -> (hasAcc.getOrElse(k, 0) + v)

  // Expose the accumulated word counts.
  override def value: mutable.HashMap[String, Int] = hasAcc
}
