package spark.example

import org.apache.spark._
import SparkContext._

/**
 * Driver used to observe how Spark chooses partition counts in yarn-cluster
 * mode: prints the partition count of an RDD built with `makeRDD` and one
 * built with `parallelize`, plus `defaultParallelism` / `defaultMinPartitions`.
 *
 * NOTE(review): the object name stays lowercase (`testPartition`) because it is
 * the spark-submit entry-point class name; renaming would break existing
 * launch scripts, even though UpperCamelCase is the Scala convention.
 */
object testPartition {

    def main(args: Array[String]): Unit = {

        val conf = new SparkConf().setAppName("Test yarn-cluster model partition nums!")
        val sc = new SparkContext(conf)
        try {
            // Pass the ranges directly: the original `Seq(1 to 20)` wrapped the
            // whole Range as a SINGLE element, yielding an RDD[Range] with one
            // row instead of an RDD[Int] with 20 rows.
            val rdd = sc.makeRDD(1 to 20)
            val rdd1 = sc.parallelize(10 to 30)
            println(s"rdd partition nums: ${rdd.partitions.size}")
            println(s"rdd1 partition nums: ${rdd1.partitions.size}")
            println(s"default parallelize: ${sc.defaultParallelism}")
            println(s"default minPartition nums: ${sc.defaultMinPartitions}")
        } finally {
            // Always release YARN containers / executors, even if a job fails.
            sc.stop()
        }
    }
}
