package chapter03

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Demonstrates RDD partitioning:
 *   - creating an RDD with an explicit partition count,
 *   - creating one with the default parallelism (all local cores for `local[*]`),
 *   - changing the partition count afterwards with `repartition`.
 */
object Test04_RDD1 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("partitions")
    val sc = new SparkContext(conf)
    try {
      // RDD with an explicitly requested partition count of 3.
      val explicitPartitions = sc.makeRDD(List(1, 2, 3, 4, 5, 6), 3)
      // RDD using the default partition count (spark.default.parallelism;
      // for local[*] this is the number of available cores).
      val defaultPartitions = sc.makeRDD(List(1, 2, 3, 4, 5, 6))
      println(explicitPartitions.getNumPartitions)
      println(defaultPartitions.getNumPartitions)
      // repartition(4) performs a shuffle to produce exactly 4 partitions.
      println(defaultPartitions.repartition(4).getNumPartitions)
    } finally {
      // Fix: the original never stopped the context, leaking the Spark
      // runtime (threads, UI server, temp dirs). Always stop it on exit.
      sc.stop()
    }
  }
}
