package org.apache.spark.doe

import org.apache.spark.rdd.{MapPartitionsRDD, RDD}
import org.apache.spark.{SparkConf, SparkContext, TaskContext}

/**
  * Demo: shows that `RDD.map` is just sugar for constructing a
  * [[MapPartitionsRDD]] whose per-partition function applies the mapper
  * lazily via `Iterator.map`.
  *
  * NOTE: `MapPartitionsRDD` is `private[spark]`, which is why this file
  * lives under the `org.apache.spark` package hierarchy.
  */
object MapDemo1 {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Ensure the local SparkContext is always released, even when the job
    // fails (e.g. the output directory "out/out01" already exists).
    try {
      val rdd1: RDD[Int] = sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10), 2)

      // High-level equivalent: val rdd2 = rdd1.map(_ * 10.0)
      val func = (x: Int) => x * 10.0

      // Hand-rolled version of `map`: the TaskContext and partition index
      // arguments of the partition function are unused, hence `_`.
      val rdd2: MapPartitionsRDD[Double, Int] = new MapPartitionsRDD[Double, Int](
        rdd1,
        (_, _, it: Iterator[Int]) => it.map(func)
      )

      rdd2.saveAsTextFile("out/out01")
    } finally {
      sc.stop()
    }
  }

}
