package cn.huq.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo of `RDD.mapPartitionsWithIndex`: tags every element with the index of
 * the partition it lives in, so the partition-to-element assignment produced
 * by `parallelize(_, 3)` can be observed in the collected output.
 */
object MapPartitionsIndexDemo {
  def main(args: Array[String]): Unit = {
    // NOTE(review): master "local" runs with a single core; the data is still
    // split into 3 partitions, they are just processed sequentially.
    val conf: SparkConf = new SparkConf().setAppName("mapPartitionsWithIndex").setMaster("local")
    val sc = new SparkContext(conf)

    // Fix: always stop the SparkContext, even if the job throws — otherwise
    // the driver-side context (UI port, scheduler threads) is leaked.
    try {
      val arr = Array(1,2,3,4,5,6,7,8,9,10,11)
      // 11 elements spread across 3 partitions.
      val rdd: RDD[Int] = sc.parallelize(arr,3)

      // The (index, iterator) function runs once per partition; mapping the
      // iterator lazily tags each element with its partition index.
      val rdd2: RDD[String] = rdd.mapPartitionsWithIndex((index, iter) => {
        iter.map(e => s"partition:$index, value:$e")
      })

      println(rdd2.collect().toBuffer)
    } finally {
      sc.stop()
    }
  }

}
