package com.offcn.bigdata.spark.p1.p3

import org.apache.spark.util.AccumulatorV2
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

object _05AccumulatorOps {
    /**
     * Word-count demo that additionally tracks how often the words
     * "hello", "me" and "you" appear, using a custom accumulator
     * registered on the SparkContext.
     */
    def main(args: Array[String]): Unit = {
        val sparkConf = new SparkConf()
            .setAppName(s"${_05AccumulatorOps.getClass.getSimpleName}")
            .setMaster("local[*]")
        val sparkContext = new SparkContext(sparkConf)

        // Register the accumulator so the driver can read its merged value
        // after an action has run.
        val trackedAcc = new HelloAccumulator
        sparkContext.register(trackedAcc, "helloAcc")

        val tokens = sparkContext
            .textFile("F:/hello.txt")
            .flatMap(_.split("\\s+"))

        val wordCounts = tokens
            .map { token =>
                // Side effect: feed selected words into the accumulator.
                if (token == "hello" || token == "me" || token == "you") {
                    trackedAcc.add(token)
                }
                (token, 1)
            }
            .reduceByKey(_ + _)

        // foreach is the action that triggers the job (and the accumulator updates).
        wordCounts.foreach(println)
        println(s"helloAccu: ${trackedAcc.value}")
        // Brief pause so the local Spark UI can be inspected before shutdown.
        Thread.sleep(2000)
        sparkContext.stop()
    }
}

/**
 * Custom [[AccumulatorV2]] that counts occurrences of each word fed to it,
 * exposing the totals to the driver as an immutable `Map[word -> count]`.
 */
class HelloAccumulator extends AccumulatorV2[String , Map[String , Long]]{
    // Per-task running counts; instances are merged on the driver via merge().
    var map = mutable.Map[String , Long]()

    // Must reflect actual emptiness: Spark calls isZero to validate that a
    // freshly reset/copied accumulator carries no state. (The original
    // hard-coded `true`, which is wrong once the map has entries.)
    override def isZero: Boolean = map.isEmpty

    // Hand back an independent copy. Cloning the map is essential: sharing
    // the mutable reference (as the original did) would let updates on one
    // accumulator silently leak into the other.
    override def copy(): AccumulatorV2[String, Map[String, Long]] = {
        val helloAccu = new HelloAccumulator
        helloAccu.map = this.map.clone()
        helloAccu
    }

    override def reset(): Unit = map.clear()

    // Immutable snapshot of the current counts.
    override def value: Map[String , Long] = map.toMap

    // Increment the count for `word` by exactly one.
    // (The original incremented twice per call: once via an if/else put of
    // count+1, then again via a duplicated getOrElse-based put.)
    override def add(word: String): Unit = {
        map.put(word , 1L + map.getOrElse(word , 0L))
    }

    // Fold another accumulator's totals into this one.
    override def merge(other: AccumulatorV2[String, Map[String, Long]]): Unit = {
        for((word , count) <- other.value){
            map.put(word , count + map.getOrElse(word , 0L))
        }
    }
}
