package com.offcn.bigdata.spark.p1.p3

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Word-count driver demonstrating Spark `LongAccumulator`s.
 *
 * Reads a text file (path from `args(0)`, defaulting to the original
 * hard-coded `F:/hello.txt`), counts occurrences of every word with a
 * classic map/reduceByKey, and separately tracks how often the words
 * "hello", "me" and "you" appear using named long accumulators.
 */
object _04AccumulatorOps {
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf()
            .setMaster("local[*]")
            .setAppName(s"${_04AccumulatorOps.getClass.getSimpleName}")
        val sc = new SparkContext(conf)

        // Allow the input path to be supplied on the command line;
        // fall back to the original hard-coded file for compatibility.
        val inputPath = if (args.nonEmpty) args(0) else "F:/hello.txt"
        val lines = sc.textFile(inputPath)

        // Cached because it is consumed by two jobs below (the accumulator
        // pass and the word-count pass) — without caching the file would be
        // re-read and re-split for each action.
        val words = lines.flatMap(line => line.split("\\s+")).cache()

        val helloAcc = sc.longAccumulator("helloAcc")
        val meAcc = sc.longAccumulator("meAcc")
        val youAcc = sc.longAccumulator("youAcc")

        // Accumulator updates are guaranteed to be applied exactly once only
        // when performed inside an ACTION. The original code incremented them
        // inside the `map` transformation feeding reduceByKey, which can
        // over-count if tasks are retried or the stage is recomputed, so the
        // increments are done here in a dedicated `foreach` action instead.
        words.foreach {
            case "hello" => helloAcc.add(1)
            case "me"    => meAcc.add(1)
            case "you"   => youAcc.add(1)
            case _       => // other words are not tracked
        }

        val word2Count = words.map(word => (word, 1)).reduceByKey(_ + _)
        // Note: in local[*] mode this prints on the driver console; on a
        // cluster the output would appear in executor logs instead.
        word2Count.foreach(println)

        // Report all three accumulators (the original printed only helloAcc,
        // and interpolated the accumulator object rather than its value).
        println(s"helloAcc: ${helloAcc.value}")
        println(s"meAcc: ${meAcc.value}")
        println(s"youAcc: ${youAcc.value}")

        sc.stop()
    }

}
