package com.zhaosc.spark.core

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Demonstrates the `coalesce` operator on an RDD.
 *
 * Creates a 3-partition RDD, prints each element with its partition id,
 * then repartitions to 6 partitions and prints the new layout.
 *
 * Notes on the two fixes relative to the naive version:
 *  - `mapPartitionsWithIndex` must RETURN an iterator that still yields the
 *    elements; draining the input iterator with a `while` loop and returning
 *    it exhausted makes the downstream action see 0 rows. We instead map over
 *    the iterator lazily, printing each element as it is consumed.
 *  - `coalesce(n, shuffle = false)` can only DECREASE the partition count;
 *    asking for 6 from 3 without shuffle silently keeps 3 partitions.
 *    To increase partitions a shuffle is required (`shuffle = true`,
 *    which is what `repartition(6)` does under the hood).
 */
object CoalesceOperator {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("CoalesceOperator").setMaster("local")
    val sc = new SparkContext(sparkConf)

    val dataArr = Array("Angelababy1", "Angelababy2", "Angelababy3", "Angelababy4", "Angelababy5", "Angelababy6", "Angelababy7", "Angelababy8", "Angelababy9", "Angelababy10", "Angelababy11", "Angelababy12")

    // Parallelize into 3 partitions.
    val dataRdd = sc.parallelize(dataArr, 3)

    // Print each partition's id and its elements. The returned iterator is
    // lazy: elements are printed as the `count()` action consumes them, and
    // they are passed through so `count()` sees the full data set.
    dataRdd.mapPartitionsWithIndex((index, iterator) => {
      println(s"partitionId:$index")
      iterator.map { value =>
        println(value)
        value
      }
    }, preservesPartitioning = false).count()

    // First argument: desired number of partitions of the returned RDD.
    // Second argument: whether the repartitioning performs a shuffle.
    // Increasing 3 -> 6 partitions REQUIRES a shuffle, hence `true`.
    val coalesceRDD = dataRdd.coalesce(6, shuffle = true)

    println(s"coalesceRDD.getNumPartitions:${coalesceRDD.getNumPartitions}")

    // Show the element layout after repartitioning, same pass-through pattern.
    coalesceRDD.mapPartitionsWithIndex((index, values) => {
      println(s"partitionId after repartition:$index")
      values.map { value =>
        println(value)
        value
      }
    }).count()

    sc.stop()
  }
}