package org.wj.arithmetic

import org.apache.spark.rdd.RDD
import org.wj.config.LocalSparkContext

import scala.collection.mutable.ListBuffer

/**
 * Demonstrates `mapPartitionsWithIndex`: like `mapPartitions`, it applies the
 * function once per partition, but the partition's index is passed in along
 * with the partition's iterator, so each output element can be tagged with
 * the partition it came from.
 */
object MapPartitionsWithIndex extends App with LocalSparkContext {

  // 100 integers (1..100) spread across 4 partitions.
  private val rdd: RDD[Int] = sc.parallelize(1 to 100, 4)

  // Keep only values > 10 and pair each with its partition index.
  // Transforming the iterator lazily (filter + map) streams through the
  // partition in constant memory, instead of eagerly buffering every
  // surviving element into a mutable ListBuffer as before.
  rdd
    .mapPartitionsWithIndex((index, iter) => iter.filter(_ > 10).map(i => (index, i)))
    .foreach(println)

}
