package com.operator

import org.apache.spark.{SparkConf, SparkContext}

/** Demo of the `coalesce` transformation: prints every element together with
  * the index of the partition it lives in, before and after repartitioning
  * a 3-partition RDD into 6 partitions.
  */
object CoalesceOperator {

  /** Prints each element of `rdd` prefixed by its partition index.
    *
    * @param rdd   the RDD to inspect
    * @param label prefix printed once per partition (e.g. "partiton:")
    *
    * NOTE: the original code drained the iterator with a `while` loop and then
    * returned the exhausted iterator, so the mapped RDD was empty and
    * `count()` always returned 0. We materialize the partition first so the
    * elements survive the printing pass.
    */
  private def printPartitions(rdd: org.apache.spark.rdd.RDD[Int], label: String): Unit = {
    rdd.mapPartitionsWithIndex((index, iterator) => {
      println(label + index)
      val elems = iterator.toList // materialize: printing must not consume the result
      elems.foreach(println)
      elems.iterator
    }, preservesPartitioning = false).count() // count() forces evaluation of the lazy RDD
  }

  def main(args: Array[String]): Unit = {
    // Fixed app name: was "MapOperator", copy-paste residue from another demo.
    val sparkConf = new SparkConf().setAppName("CoalesceOperator").setMaster("local")

    val sc = new SparkContext(sparkConf)
    val dataArr = Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)
    val rdd = sc.parallelize(dataArr, 3)

    printPartitions(rdd, "partiton:")

    // BUG FIX: coalesce with shuffle = false can only DECREASE the number of
    // partitions; asking for 6 from 3 without a shuffle is a silent no-op.
    // shuffle = true actually redistributes the data into 6 partitions,
    // which is what this demo sets out to show.
    val coalesceRdd = rdd.coalesce(6, shuffle = true)

    printPartitions(coalesceRdd, "重分区后:")
  }
}
