package cn.darksoul3.spark.operator

import org.apache.spark.{SparkConf, SparkContext}

object Count {

  /** Minimal Spark driver: counts the elements of a small in-memory RDD
    * and prints the result (7) to stdout.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // App name fixed: previously "ForeachPartition", a copy-paste leftover
    // from a sibling example; it should match this object.
    val conf = new SparkConf()
      .setAppName("Count")
      .setMaster("local[*]")

    val sc = new SparkContext(conf)

    try {
      // 7 elements spread across 2 partitions.
      val numbers = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7), numSlices = 2)

      // RDD.count() already runs a per-partition counting job and sums the
      // partial counts on the driver — equivalent to the previous hand-rolled
      // sc.runJob(it => {...count iterator...}).sum, but idiomatic.
      val total: Long = numbers.count()

      println(total)
    } finally {
      // Release the SparkContext even if the job throws.
      sc.stop()
    }
  }
}
