package com.xzx.spark.core.acc

import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * Demonstrates a custom Spark accumulator ([[WordCuntAccumulator]]) that
 * counts word occurrences across an RDD in a distributed-safe way.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-08-07 10:33 PM
 */
object Spark002_Custom {
  /** Entry point: counts words from input/wc.txt with a custom accumulator and prints the result. */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName(getClass.getSimpleName)
    val sc = new SparkContext(conf)

    // Split each line of the input file into individual words.
    val words = sc.textFile("input/wc.txt").flatMap(line => line.split(" "))

    // The accumulator must be registered with the SparkContext before use.
    val wcAcc = new WordCuntAccumulator
    sc.register(wcAcc)

    words.foreach { word =>
      // Trace which executor thread is processing this element.
      println(Thread.currentThread().getName + Thread.currentThread().getId)
      wcAcc.add(word)
    }

    // Accumulator values are only reliable on the driver, after the action completes.
    println(wcAcc.value)
    sc.stop()
  }
}

/**
 * Custom word-count accumulator: each added String increments its
 * occurrence count in a mutable map of word -> count.
 */
class WordCuntAccumulator extends AccumulatorV2[String, mutable.Map[String, Int]] {
  // Holds the word counts accumulated so far (per-task partial or driver-side final).
  var map: mutable.Map[String, Int] = mutable.Map()

  /** The accumulator is at its zero value when no word has been counted yet. */
  override def isZero: Boolean = map.isEmpty

  /**
   * Returns a new accumulator carrying a snapshot of the current counts.
   *
   * BUG FIX: the previous implementation returned a fresh, empty accumulator,
   * violating the AccumulatorV2 contract that `copy()` must create a new copy
   * of this accumulator *including its current value* — counts accumulated
   * before a copy were silently dropped.
   */
  override def copy(): AccumulatorV2[String, mutable.Map[String, Int]] = {
    val copied = new WordCuntAccumulator
    copied.map = map.clone()
    copied
  }

  /** Resets this accumulator back to its zero value (an empty map). */
  override def reset(): Unit = map.clear()

  /** Increments the count for word `v`, starting from 0 if unseen. */
  override def add(v: String): Unit = map(v) = map.getOrElse(v, 0) + 1

  /** Merges another task's partial counts into this accumulator (driver-side). */
  override def merge(other: AccumulatorV2[String, mutable.Map[String, Int]]): Unit =
    other.value.foreach { case (word, count) =>
      map(word) = map.getOrElse(word, 0) + count
    }

  /** Current accumulated counts. NOTE(review): exposes the internal mutable map directly. */
  override def value: mutable.Map[String, Int] = map
}
