package com.xf.day04

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates how `coalesce` and `repartition` affect an RDD's partition count:
 *   - `coalesce(n)` without shuffle can only REDUCE partitions (asking for more is a no-op);
 *   - `coalesce(n, shuffle = true)` (and `repartition`, which is exactly that) can increase them.
 */
object Test01 {
  def main(args: Array[String]): Unit = {
    // Spark configuration for a local run using all available cores.
    val conf = new SparkConf()
      .setAppName("WordCount")
      .setMaster("local[*]")
      .set("spark.ui.port", "8080")
      .set("spark.driver.host", "127.0.0.1")

    // Create the SparkContext (driver entry point).
    val sc = new SparkContext(conf)

    try {
      // 16 elements spread over 4 initial partitions.
      val rdd: RDD[Int] = sc.parallelize(1 to 16, 4)
      println(rdd.partitions.size) // 4

      // Shrinking without shuffle works: 4 -> 3.
      val coalRDD2 = rdd.coalesce(3)
      println(coalRDD2.partitions.size) // 3

      // Growing without shuffle is silently ignored: stays at 4.
      val coalRDD = rdd.coalesce(5)
      println(coalRDD.partitions.size) // 4

      // With shuffle = true the partition count can increase: 4 -> 5.
      val coalRDD1 = rdd.coalesce(5, true)
      println(coalRDD1.partitions.size) // 5

      // repartition(n) == coalesce(n, shuffle = true): 4 -> 8.
      val coalRDD3 = rdd.repartition(8)
      println(coalRDD3.partitions.size) // 8
    } finally {
      // Fix: the original never stopped the context, leaking the driver's
      // resources (UI port, scheduler threads). Always stop it on exit.
      sc.stop()
    }
  }
}
