package com.sunzm.spark.core

import java.lang

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.util.LongAccumulator

/**
 * Demonstrates correct use of a Spark [[LongAccumulator]]:
 * counts the records of an order file and prints per-partition sizes.
 *
 * Fixes versus the naive version:
 *  - Accumulator updates happen inside an ACTION (`foreach`), not a
 *    transformation. Spark guarantees exactly-once application of
 *    accumulator updates only for actions; updates made in
 *    transformations may be re-applied on task retries / recomputation.
 *  - The accumulator's `.value` is read ONLY on the driver. Tasks
 *    cannot reliably read an accumulator; executor-side reads observe
 *    a partial, task-local count.
 */
object AccumulatorDemo {
  def main(args: Array[String]): Unit = {
    // Set up the local execution environment.
    val conf = new SparkConf().setAppName("RDD示例")
      .setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Read the order data (one record per line).
    val orderRDD: RDD[String] = sc.textFile("data/spark/rdd/order.txt")

    val orderCountACC: LongAccumulator = sc.longAccumulator("orderCountACC")

    // Accumulate inside an action so each record is counted exactly once,
    // even if tasks are retried. (A side-effecting `map` would be lazy and
    // could double-count on recomputation.)
    orderRDD.foreach(_ => orderCountACC.add(1))

    // Per-partition record counts, computed locally from each partition's
    // iterator — no executor-side accumulator reads needed.
    val resultRDD: RDD[(Int, Int)] = orderRDD.mapPartitionsWithIndex {
      case (index, ite) => Iterator.single((index, ite.size))
    }

    // Collect to the driver so the output is printed deterministically
    // in the driver's stdout rather than scattered across executors.
    resultRDD.collect().foreach {
      case (index, count) => {
        println(s"${index} -> ${count}")
      }
    }

    // Read the final total on the driver — the only place `.value` is valid.
    val sum: lang.Long = orderCountACC.value

    println(s"sum: ${sum}")

    sc.stop()
  }
}
