package com.alison.scala

import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}
import scala.collection.mutable.Map

object AccmulatorTest {

  /** Entry point: runs the custom-accumulator demonstration. */
  def main(args: Array[String]): Unit = customAcc

  /** Sums the counts of a pair RDD with Spark's built-in long accumulator.
    *
    * Fix: the original never called sc.stop(), leaking the SparkContext
    * (the sibling demo customAcc does stop its context).
    */
  def accmulatorTest() = {
    val conf = new SparkConf().setAppName(this.getClass.getName).setMaster("local")
    val sc = new SparkContext(conf)
    val rdd = sc.makeRDD(List(("a", 1), ("b", 2), ("a", 3), ("b", 4)))
    // Declare the accumulator on the driver; each task adds into its own copy,
    // which Spark merges back on the driver.
    val sumAcc = sc.longAccumulator("sumAcc")
    rdd.foreach {
      case (word, count) => {
        sumAcc.add(count)
      }
    }
    // Read the merged value (only meaningful on the driver, after the action).
    println(sumAcc.value)
    sc.stop()
  }

  // Counts the occurrences of words that start with "H"
  /** Counts occurrences of words beginning with "H" via the custom accumulator. */
  def customAcc = {
    val conf = new SparkConf().setAppName(this.getClass.getName).setMaster("local")
    val sc = new SparkContext(conf)
    val words = sc.makeRDD(List("Hello", "HaHa", "spark", "scala", "Hi", "Hello", "Hi"))
    // Register the accumulator with the context so Spark can merge task copies.
    val myAcc = new MyAccmulator
    sc.register(myAcc, "myAcc")
    // Each task accumulates into its own copy; results are merged on the driver.
    words.foreach(word => myAcc.add(word))
    println(myAcc.value)
    sc.stop()
  }

  /** Custom accumulator counting occurrences of words that start with "H".
    * IN type is a single word (String); OUT type is the word -> count map.
    */
  class MyAccmulator extends AccumulatorV2[String, scala.collection.mutable.Map[String, Int]] {

    // Partial per-task counts; Spark merges task copies back via merge().
    var map: scala.collection.mutable.Map[String, Int] = scala.collection.mutable.Map[String, Int]()

    /** True when no counts have been recorded yet. */
    override def isZero: Boolean = map.isEmpty

    /** Returns an independent copy for a task.
      *
      * Fix: the original assigned myacc.map = this.map, sharing one mutable
      * map between the original and every copy, so task-side additions would
      * corrupt each other's state. A deep copy of the entries is required.
      */
    override def copy(): AccumulatorV2[String, Map[String, Int]] = {
      val myacc = new MyAccmulator
      myacc.map = scala.collection.mutable.Map(this.map.toSeq: _*)
      myacc
    }

    /** Empties the accumulated state so isZero becomes true. */
    override def reset(): Unit = {
      map.clear()
    }

    /** Records one occurrence of v if it starts with "H"; other words are ignored. */
    override def add(v: String): Unit = {
      if (v.startsWith("H")) {
        map(v) = map.getOrElse(v, 0) + 1
      }
    }

    /** Adds another accumulator's partial counts into this one.
      *
      * Fix: the original folded this.map over other.value and computed
      * map.getOrElse(kv._1, 0) + kv._2 — both operands taken from THIS
      * accumulator — which doubled local counts, discarded the other side's
      * counts, and mutated other.value in place. Correct merge sums the
      * other side's count into this map.
      */
    override def merge(other: AccumulatorV2[String, Map[String, Int]]): Unit = {
      other.value.foreach { case (word, count) =>
        map(word) = map.getOrElse(word, 0) + count
      }
    }

    /** Current merged counts (driver-side read). */
    override def value: Map[String, Int] = {
      map
    }
  }
}
