package cn.doitedu.day06

import org.apache.spark.rdd.RDD
import org.apache.spark.util.LongAccumulator
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates Spark's built-in accumulator facility.
 *
 * Teaching point: because `rdd2` is NOT cached, every action replays the
 * lineage (including the `map` that bumps the accumulator). After `reset()`,
 * the second action re-counts the even numbers, so both printed counts are 5.
 * Uncomment `rdd2.cache()` to observe the second count staying at 0.
 */
object T04_AccumulatorDemo4 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName(this.getClass.getSimpleName)
      .setMaster("local[4]")
    val sc = new SparkContext(conf)

    try {
      val rdd1: RDD[Int] = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 2)

      // Define a driver-side accumulator; executors add to it, the driver reads it.
      val accumulator: LongAccumulator = sc.longAccumulator("my-acc")

      // Count even numbers as a side effect of the transformation.
      // NOTE: this increment re-runs each time the lineage is recomputed.
      val rdd2: RDD[Int] = rdd1.map(n => {
        if (n % 2 == 0) {
          accumulator.add(1)
        }
        n * 10
      })

      //rdd2.cache() // cache rdd2 to avoid recomputation (and double-counting) on the second action

      val resArr = rdd2.collect()

      println(resArr.toBuffer)

      println("第一次触发Action后，偶数的数量：" + accumulator.count)

      // Reset to zero; the next action will repopulate it because rdd2 is recomputed.
      accumulator.reset()

      // Second action: fails if out/out06 already exists (Hadoop output semantics).
      rdd2.saveAsTextFile("out/out06")

      println("第二次触发Action后，偶数的数量：" + accumulator.count)
    } finally {
      // Always release the SparkContext, even if an action above throws.
      sc.stop()
    }
  }

}
