package com.xzx.spark.core.transform

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates growing an RDD's partition count (4 -> 6) with
 * coalesce(numPartitions, shuffle = true), which is equivalent to
 * repartition. Prints each partition's contents before and after.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 PM
 */
object Spark009_Coalesce {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("Spark009_Coalesce")
    val context = new SparkContext(conf)

    // Six elements distributed over 4 initial partitions.
    val dataRDD = context.makeRDD(List(
      1, 2, 3, 4, 5, 6
    ), 4)

    // Log each partition's contents before repartitioning.
    val mapPartitionsWithIndexRDD = dataRDD.mapPartitionsWithIndex {
      case (index, iterable) =>
        // Materialize first: a Scala Iterator can be traversed only once,
        // so printing it directly would exhaust it.
        val list = iterable.toList
        println("before: index:" + index + "；data:" + list)
        list.iterator
    }

    // coalesce can only INCREASE the partition count when shuffle = true;
    // with shuffle = false a request for more partitions is ignored.
    // (repartition(6) is shorthand for exactly this call.)
    val dataRDD1 = mapPartitionsWithIndexRDD.coalesce(6, shuffle = true)

    dataRDD1.mapPartitionsWithIndex {
      case (index, iterable) =>
        // BUG FIX: the original printed iterable.toList and then returned the
        // now-exhausted iterator, yielding empty partitions downstream.
        // Materialize once, print the list, and return a fresh iterator —
        // consistent with the "before" stage above.
        val list = iterable.toList
        println("after: index:" + index + "；data:" + list)
        list.iterator
    }.collect()

    context.stop()
  }
}
