package org.apache.spark.day02

import org.apache.spark.rdd.{MapPartitionsRDD, RDD}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demo: re-implementing `RDD.mapPartitionsWithIndex` by constructing Spark's
 * internal MapPartitionsRDD directly.
 */
object MapPartitionsFilterRddDemo {

  /**
   * Builds a Spark job that tags every element with the id of the partition
   * it lives in, then prints the collected results.
   *
   * NOTE: this constructs `MapPartitionsRDD` directly, which is
   * `private[spark]` — it only compiles because this file is placed under the
   * `org.apache.spark` package hierarchy. Production code should use the
   * public `rdd.mapPartitionsWithIndex` instead.
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("data").setMaster("local[*]")
    val sc = new SparkContext(conf)

    try {
      val array = Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
      // Three partitions so the output shows partition ids 0, 1 and 2.
      val rdd: RDD[Int] = sc.parallelize(array, 3)

      // The closure receives (TaskContext, partitionIndex, elementIterator).
      // Use the supplied partition index directly instead of ignoring it and
      // re-querying the TaskContext via tc.partitionId().
      val res: MapPartitionsRDD[String, Int] =
        new MapPartitionsRDD[String, Int](rdd, (_, index, iter) =>
          iter.map(e => s"partition:$index, value: $e"))

      println(res.collect().toBuffer)
    } finally {
      // Release the SparkContext even if the job above throws.
      sc.stop()
    }
  }

}
