package com.offcn.spark.p4

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author: BigData-LGW
 * @ClassName: Accumulator3
 * @Date: 2020/12/8 20:27
 * @Description: Demonstrates a custom Spark accumulator — shows that updates made
 *               inside a transformation only become visible in the driver after an
 *               action has actually executed the tasks.
 * @Version: 1.0
 */
object Accumulator3 {

    /**
     * Word-count demo built around a custom accumulator.
     *
     * A `MyAccumultor` (project-defined `AccumulatorV2` subclass — see its own file)
     * is bumped inside a `map` transformation for the words "spark" and "second".
     * Because transformations are lazy, the accumulator is still at its initial
     * value when printed before the action; only after `foreach` triggers the job
     * do the executor-side updates get merged back into the driver.
     *
     * NOTE(review): Spark only guarantees exactly-once accumulator updates inside
     * actions; updates made in transformations (as here) can be re-applied if a
     * stage is re-executed. Fine for a demo, not for production metrics.
     */
    def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
            new SparkConf()
                .setAppName("Accumulator3")
                .setMaster("local[*]")
        )

        // Two in-memory lines of text to count words over.
        val lines = sc.parallelize(List(
            "a second spark a spark is shared second",
            "spark shared be shared in second spark"
        ))

        // Custom accumulator must be registered with the context before use.
        val trackedWords = new MyAccumultor()
        sc.register(trackedWords, "myAccu")

        val wordPairs = lines
            .flatMap(line => line.split("\\s+"))
            .map { word =>
                // Side effect on the executors: tally the two tracked words.
                if (word == "spark" || word == "second") {
                    trackedWords.add(word)
                }
                (word, 1)
            }

        val wordCounts = wordPairs.reduceByKey(_ + _)

        // No action has run yet — prints the accumulator's initial value.
        println("action前，累加结果：" + trackedWords.value)
        // The action executes the whole lineage (prints occur on the executors
        // when running local[*]).
        wordCounts.foreach(println)
        // Executor updates have now been merged into the driver-side value.
        println("action后，累加结果：" + trackedWords.value)

        sc.stop()
    }
}
