package com.galeno.算子.转换算子



import com.galeno.utils.SparkUtil
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD

/**
 * @Description: Demonstrates the difference between `mapPartitions` and
 *               `mapPartitionsWithIndex` on a small RDD.
 * @author galeno
 * @date 2021/8/26 20:22
 * mapPartitionsWithIndex: like mapPartitions, but the function additionally
 * receives the partition index.
 */
object mapPartitions {

  /**
   * Entry point: builds a 7-element RDD spread over 3 partitions and prints
   * it three ways —
   *   1. raw elements,
   *   2. elements times 10 via `mapPartitions` (no partition index available),
   *   3. elements times 10 prefixed with their partition index via
   *      `mapPartitionsWithIndex`.
   *
   * Note: `foreach(println)` runs on the executors, so with multiple
   * partitions the print order is not deterministic.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val sc: SparkContext = SparkUtil.getSc
    try {
      val data = List(1, 2, 3, 4, 5, 6, 7)
      val rdd: RDD[Int] = sc.makeRDD(data, 3)

      rdd.foreach(println)
      println("*" * 100)

      // mapPartitions: transforms each partition's iterator as a whole;
      // the partition index is NOT available here.
      rdd.mapPartitions(iter => iter.map(_ * 10)).foreach(println)
      println("*" * 100)

      // mapPartitionsWithIndex: same per-partition transformation, but the
      // function also receives the partition index, which we prepend to
      // each value (output format kept identical to the original).
      rdd.mapPartitionsWithIndex { (index, iter) =>
        iter.map(n => index + ":::::" + n * 10)
      }.foreach(println)
    } finally {
      // Original leaked the SparkContext; always release cluster resources.
      sc.stop()
    }
  }

}
