package com.zhang.spark_2.com.zhang.core.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * Word-count example that tallies word occurrences on the executors
 * using a custom Spark accumulator ([[WcAcc]]) registered on the driver.
 *
 * @author zhang
 * @note created 2022/2/17 09:47
 */
object WC_Acc {

  def main(args: Array[String]): Unit = {
    // Set up the local Spark environment.
    val conf = new SparkConf().setMaster("local").setAppName("WordCount")
    val sc = new SparkContext(conf)

    // Declare the custom word-count accumulator and register it with the driver.
    val acc = new WcAcc()
    sc.register(acc, "wc")

    // Sample data: (word, repeat-count) pairs spread over 2 partitions.
    val rdd = sc.makeRDD(
      List(("a", 1), ("b", 2), ("a", 1), ("b", 2), ("a", 1), ("b", 2)), 2
    )

    // Expand each (word, n) pair into n repetitions of the word.
    val words: RDD[String] = rdd
      .map { case (w, n) => (w + " ") * n }
      .flatMap(_.split(" "))

    // Count on the executors; partial results are merged back on the driver.
    words.foreach(acc.add)

    println(acc.value)
    sc.stop()
  }

  /**
   * Custom accumulator that counts word occurrences.
   *
   * IN type: a single word per `add` call.
   * OUT type: mutable map of word -> occurrence count.
   */
  class WcAcc extends AccumulatorV2[String, mutable.Map[String, Int]] {
    // Backing store for the running counts.
    private val map = mutable.Map[String, Int]()

    /** True iff no counts have been accumulated yet. */
    override def isZero: Boolean = map.isEmpty

    /**
     * Returns a copy carrying the CURRENT counts.
     *
     * Fix: the previous implementation returned an empty accumulator, which
     * violates the `AccumulatorV2.copy` contract — `copy()` must preserve the
     * current value; `copyAndReset()` is the variant that yields a zero copy.
     */
    override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = {
      val acc = new WcAcc()
      acc.map ++= map
      acc
    }

    /** Clears all accumulated counts. */
    override def reset(): Unit = map.clear()

    /** Increments the count for word `v` by one. */
    override def add(v: String): Unit =
      map.update(v, map.getOrElse(v, 0) + 1)

    /** Merges the partial counts of another accumulator into this one. */
    override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit =
      // Plain foreach: the original used foldLeft solely for its mutation
      // side effect, discarding the fold result.
      other.value.foreach { case (word, cnt) =>
        map.update(word, map.getOrElse(word, 0) + cnt)
      }

    /** Current word -> count map (the live backing map, not a defensive copy). */
    override def value: mutable.Map[String, Int] = map
  }

}
