package cn.huq.day03

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}


//rdd.map()
/**
def map[U: ClassTag](f: T => U): RDD[U] = withScope {
          val cleanF = sc.clean(f)
          new MapPartitionsRDD[U, T](this, (context, pid, iter) => iter.map(cleanF))
        }
 */

/**
 *   def mapPartitions[U: ClassTag](
      f: Iterator[T] => Iterator[U],
      preservesPartitioning: Boolean = false): RDD[U] = withScope {
    val cleanedF = sc.clean(f)
    new MapPartitionsRDD(
      this,
      (context: TaskContext, index: Int, iter: Iterator[T]) => cleanedF(iter),
      preservesPartitioning)
  }
 */


object MapPartitionsDemo {

  /**
   * Demonstrates `RDD.mapPartitions`: the supplied function is invoked once per
   * partition with an `Iterator` over that partition's elements (see the quoted
   * Spark source above), here filtering each partition down to its even values.
   */
  def main(args: Array[String]): Unit = {
    // App name matches this demo (it uses mapPartitions, not mapPartitionsWithIndex).
    val conf: SparkConf = new SparkConf().setAppName("MapPartitionsDemo").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      val arr = Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)
      // 3 partitions, so the filter closure below runs three times — once per partition.
      val rdd: RDD[Int] = sc.parallelize(arr, 3)

      // Keep only even numbers; iter.filter is lazy, evaluated when the job runs.
      val rdd2: RDD[Int] = rdd.mapPartitions(iter => iter.filter(_ % 2 == 0))

      println(rdd2.collect().toBuffer)
    } finally {
      // Always release the SparkContext, even if the job above throws.
      sc.stop()
    }
  }

}
