package com.gy.spark.core.transformations

import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.ListBuffer

/**
 * Repartitioning demo: builds an RDD across 4 partitions, then uses
 * coalesce(numPartitions, shuffle = true) to grow it to 5 partitions,
 * printing each element's partition index before and after.
 */
object Operator_coalesce {

  /**
   * Entry point. Demonstrates how `coalesce` redistributes records:
   *  1. parallelize 6 ints over 4 partitions,
   *  2. print each element tagged with its original partition index,
   *  3. coalesce to 5 partitions — increasing the count requires shuffle = true
   *     (with shuffle = false, coalesce can only shrink the partition count),
   *  4. print each element tagged with its new partition index.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName(this.getClass.getSimpleName)
    val sc = new SparkContext(conf)

    // 6 elements spread over 4 initial partitions.
    val rdd1 = sc.parallelize(Array(1, 2, 3, 4, 5, 6), 4)

    // Tag every element with the index of the partition it currently lives in.
    // iter.map keeps the per-partition iterator lazy — no intermediate buffer needed.
    val rdd2 = rdd1.mapPartitionsWithIndex((partitionIndex, iter) =>
      iter.map(value => s"partition:$partitionIndex\t value:$value")
    )
    println("rdd2==============================")
    rdd2.foreach(println)

    println("rdd3==============================")
    // Growing the partition count (4 -> 5) only takes effect when shuffle = true.
    val rdd3 = rdd2.coalesce(5, shuffle = true)
    rdd3.foreach(println)

    println("rdd4==============================")
    // Show where each record ended up after the shuffle-based repartition.
    rdd3.mapPartitionsWithIndex((index, iter) =>
      iter.map(record => s"new partition:$index\t value:$record")
    ).foreach(println)

    sc.stop()
  }

}
