package Value类型

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object coalesce {
    // Demo of RDD.coalesce: reduces the number of partitions. With the second
    // argument `shuffle = false`, partitions are merged locally on each
    // executor — no shuffle stage is produced.
    def main(args: Array[String]): Unit = {

        // NOTE(review): appName "WordCount" looks copy-pasted from another demo;
        // kept as-is to preserve runtime behavior, but consider renaming.
        val config: SparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount")

        val sc = new SparkContext(config)

        try {
            // 100 integers spread across 4 initial partitions.
            val rdd = sc.parallelize(1 to 100, 4)

            println("缩减分区前" + rdd.partitions.size)
            // glom() groups each partition into an Array so we can print
            // exactly which elements live in which partition.
            rdd.glom().collect().foreach(array => {
                println(array.mkString(","))
            })

            // Merge 4 partitions down to 3 without shuffling data across the network.
            val coalesceRDD: RDD[Int] = rdd.coalesce(3, shuffle = false)

            println("缩减分区后" + coalesceRDD.partitions.size)
            coalesceRDD.glom().collect().foreach(array => {
                println(array.mkString(","))
            })
        } finally {
            // Fix: the original never stopped the SparkContext, leaking the
            // local Spark runtime. Always release it, even if the job throws.
            sc.stop()
        }
    }

}
