package com.xbai.spark.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
  * The repartition(numPartitions) operator.
  * Purpose: reshuffles all data across the network according to the given
  * number of partitions.
  * repartition is implemented as a call to coalesce with shuffle = true.
  * Example: create an RDD with 4 partitions, then repartition it.
  *
  * @author xbai
  * @Date 2020/12/30
  */
object Spark12_Repartition {

  /**
    * Entry point: builds a 4-partition RDD of 1..16, repartitions it down to 2
    * partitions via `repartition` (a shuffled `coalesce`), and prints the
    * partition counts before and after.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    // Local mode using all available cores; app name shown in the Spark UI.
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("repartition")
    val sc = new SparkContext(conf)

    try {
      // 16 elements spread over 4 initial partitions.
      val listRDD: RDD[Int] = sc.makeRDD(1 to 16, 4)
      println("缩减分区前 = " + listRDD.partitions.length)

      // repartition always shuffles, so it can both grow and shrink partition counts.
      val repartitionRDD: RDD[Int] = listRDD.repartition(2)
      println("缩减分区后 = " + repartitionRDD.partitions.length)
    } finally {
      // Fix: the context was never stopped, leaking the driver's resources.
      sc.stop()
    }
  }
}
