package com.bdqn.spark.chapter05.value

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Exercise: use `mapPartitionsWithIndex` to extract the data of a single
 * partition — here the first partition (index = 0).
 */
object Spark03_RDD_Operator_Transform_Test {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("practise")
    val sc: SparkContext = new SparkContext(conf)

    // Two partitions:
    //   partition 0 -> List(1, 2)
    //   partition 1 -> List(3, 4)
    val sourceRDD = sc.makeRDD(List(1, 2, 3, 4), 2)

    // Keep only the elements of the first partition (index = 0).
    // FIX: the original mapped every element to (index, num), tagging all
    // partitions instead of selecting the first one as the comment intended;
    // now partition 0 passes its iterator through and every other partition
    // contributes an empty iterator.
    sourceRDD.mapPartitionsWithIndex(
      (index, iter) => {
        if (index == 0) iter else Iterator.empty
      }
    ).collect().foreach(println) // collect() is an action (side effect) — keep parentheses

    sc.stop()
  }
}
