package com.offcn.bigdata.spark.p3

import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable


/**
  * accumulator的操作
  */
/**
  * Demo driver: runs a word count over a local text file while a custom
  * accumulator tallies how often the words "a", "at" and "level" appear.
  */
object _05AccumulatorOps {
    def main(args: Array[String]): Unit = {
        val sparkConf = new SparkConf()
            .setMaster("local[*]")
            .setAppName(s"${_05AccumulatorOps.getClass.getSimpleName}")
        val context = new SparkContext(sparkConf)

        // NOTE(review): hardcoded demo input path — adjust for your machine.
        val rawLines = context.textFile("file:/E:/work/2020-0828期大数据/workspace/spark-parent-0828/data/accumulator.txt")

        val tokens = rawLines.flatMap(_.split("\\s+"))

        // Register the custom accumulator so Spark merges per-partition copies.
        val wordAccumulator = new MyAccumulator
        context.register(wordAccumulator, "myAccu")

        // Track the three target words as a side effect of the word-count map.
        val trackedWords = Set("a", "at", "level")
        val countsByWord = tokens
            .map { token =>
                if (trackedWords.contains(token)) {
                    wordAccumulator.add(token)
                }
                (token, 1)
            }
            .reduceByKey(_ + _)

        countsByWord.foreach(println)
        println("---------------累加器的值-----------------------")
        println(s"myAccu: ${wordAccumulator.value}")
        // Keep the driver alive so the Spark web UI can be inspected.
        Thread.sleep(200000)
        context.stop()
    }
}

/**
  * Custom accumulator that counts occurrences of each word added to it.
  * IN type: String (a single word); OUT type: an immutable Map[String, Long]
  * from word to its total count.
  */
class MyAccumulator extends AccumulatorV2[String, Map[String, Long]] {
    // Mutable per-copy state holding the running counts.
    var map = mutable.Map[String, Long]()

    // FIX: the original unconditionally returned true, violating the
    // AccumulatorV2 contract — isZero must report whether this accumulator
    // currently holds its zero (empty) value.
    override def isZero: Boolean = map.isEmpty

    // FIX: the original assigned this.map by reference, so the copy and the
    // original shared one mutable map and mutations leaked between them.
    // Clone so each accumulator copy owns independent state.
    override def copy(): AccumulatorV2[String, Map[String, Long]] = {
        val myAccu = new MyAccumulator
        myAccu.map = this.map.clone()
        myAccu
    }

    // Restore the zero value (empty counts).
    override def reset(): Unit = map.clear()

    // Immutable snapshot of the current counts.
    override def value: Map[String, Long] = map.toMap

    // Intra-partition aggregation: bump the count for one word.
    override def add(word: String): Unit = {
        map.put(word, 1L + map.getOrElse(word, 0L))
    }

    // Inter-partition aggregation: fold another accumulator's counts into ours.
    override def merge(other: AccumulatorV2[String, Map[String, Long]]): Unit = {
        for ((word, count) <- other.value) {
            map.put(word, count + map.getOrElse(word, 0L))
        }
    }

}