package cn.darksoul3.spark.operator

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext, TaskContext}

object MapPartitions {

  /**
   * Demonstrates the difference between `map` (the function is invoked once
   * per element) and `mapPartitions` / `mapPartitionsWithIndex` (the function
   * is invoked once per partition and receives the whole partition as an
   * iterator), tagging each output element with the partition it came from.
   *
   * Side effects: prints the tagged elements to stdout and writes the
   * `mapPartitionsWithIndex` output to the directory `map-out3`.
   */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    conf.setAppName("map partitions")
    conf.setMaster("local[*]")

    val sc = new SparkContext(conf)

    // 9 elements spread over 3 slices => 3 elements per partition.
    val numbers: RDD[Int] = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 9), numSlices = 3)

    // map: runs one-by-one, so TaskContext is consulted for every element.
    val results = numbers.map(x => {
      val partitionId = TaskContext.getPartitionId()
      (partitionId, x * 100)
    })

    // mapPartitions: runs once per partition, so per-partition setup
    // (here: reading the partition id) happens only once per partition.
    val result2 = numbers.mapPartitions(iter => {
      val partitionId: Int = TaskContext.getPartitionId()
      iter.map(x => (partitionId, x * 100))
    }, preservesPartitioning = true) // keep the parent RDD's partitioner

    // mapPartitionsWithIndex: like mapPartitions, but the partition index is
    // supplied directly instead of being read from TaskContext.
    val result3 = numbers.mapPartitionsWithIndex((index: Int, iter: Iterator[Int]) => {
      iter.map(ele => s"partition index $index element $ele")
    })

    // Fix: `results` and `result2` were defined but never followed by an
    // action. RDD transformations are lazy, so the map/mapPartitions
    // comparison never actually executed. Materialize both so the demo runs
    // all three variants.
    results.collect().foreach(println)
    result2.collect().foreach(println)

    result3.saveAsTextFile("map-out3")

    sc.stop()
  }

}
