package RDD

import org.apache.spark.{SparkConf, SparkContext}
import org.junit.Test

/**
 * Demonstrates how RDD partition counts are determined for
 * (a) a parallelized local collection and (b) a file-backed dataset.
 *
 * NOTE(review): class name should be UpperCamelCase (`CreatePartition`);
 * left as-is to avoid breaking test discovery / external references.
 */
class createPartition {
  // local[6]: run Spark locally with 6 worker threads.
  val conf = new SparkConf().setAppName("Test").setMaster("local[6]")
  val sc = new SparkContext(conf)

  @Test
  def createPartition_local(): Unit = {
    // Local collection: explicit numSlices = 2 overrides default parallelism.
    val rdd1 = sc.parallelize(Seq(1, 2, 3, 4, 5), 2)
    println(s"rdd1的分区数:${rdd1.partitions.size}")
    // File-backed dataset: partition count is derived from the input splits
    // of the underlying file (minPartitions defaults from the context).
    val rdd2 = sc.textFile("resource/data.csv")
    println(s"rdd2的分区数:${rdd2.partitions.size}")
  }

  /**
   * Stop the SparkContext after each test. Without this the context's
   * threads and UI keep running, and only one active SparkContext is
   * allowed per JVM — leaking it breaks subsequent test classes.
   */
  @After
  def tearDown(): Unit = sc.stop()
}
