package com.study.bigdata.spark.core.rdd.acc

import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

object Scala02_Acc {
  def main(args: Array[String]): Unit = {

    // Local Spark context for this accumulator demo.
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount")
    val sc = new SparkContext(sparkConf)

    // Sample data: six "scala" followed by four "spark".
    val words = List.fill(6)("scala") ++ List.fill(4)("spark")
    val rdd = sc.makeRDD(words)

    // Create the custom accumulator and register it with Spark so it is
    // serialized out to the executors and merged back on the driver.
    val wordCountAcc = new WorldCountAccumulator
    sc.register(wordCountAcc, "wordCount")

    // Feed every word into the accumulator on the executor side.
    rdd.foreach(wordCountAcc.add)

    // Read the merged word counts back on the driver.
    println(wordCountAcc.value)

    sc.stop()

  }
  // Custom word-count accumulator.
  // Steps: 1. extend AccumulatorV2  2. fix the type parameters
  // (IN = String, OUT = mutable.Map[String, Int])  3. override the methods.
  class WorldCountAccumulator extends AccumulatorV2[String,mutable.Map[String,Int]]{
    // Internal state: word -> occurrence count.
    private val wcMap =mutable.Map[String,Int]()

    // True when the accumulator is still in its zero (empty) state.
    override def isZero: Boolean = {
      wcMap.isEmpty
    }

    // Create a copy of this accumulator INCLUDING its current contents.
    // BUG FIX: the previous implementation returned an empty accumulator,
    // which violates the AccumulatorV2.copy contract ("creates a new copy
    // of this accumulator") and would silently drop data whenever Spark
    // copies a non-zero accumulator.
    override def copy(): AccumulatorV2[String, mutable.Map[String,Int]] = {
      val newAcc = new WorldCountAccumulator
      newAcc.wcMap ++= wcMap
      newAcc
    }

    // Reset the accumulator back to the zero state.
    override def reset(): Unit = {
      wcMap.clear()
    }

    // Add one word to the accumulator (called on the executor side).
    override def add(word: String): Unit = {
      val oldCnt = wcMap.getOrElse(word,0)
      wcMap.update(word,oldCnt+1)
    }

    // Merge another task's accumulator into this one (driver side).
    override def merge(other: AccumulatorV2[String, mutable.Map[String,Int]]): Unit = {
      other.value.foreach{
        case (word,cnt) =>{
          val oldCnt = wcMap.getOrElse(word,0)
          wcMap.update(word,oldCnt+cnt)
        }
      }
    }

    // Expose the accumulated result to the driver.
    override def value: mutable.Map[String,Int] = wcMap
  }

}
