package com.doit.day02

import org.apache.spark.{SparkConf, SparkContext}

/**
 * @Author:
 * @WX: 17710299606
 * @Tips: To learn big data, come to Duoyi Education
 * @DOC: https://blog.csdn.net/qq_37933018?spm=1000.2115.3001.5343
 * @Description: Demonstrates how Spark determines the number of RDD partitions
 *               for parallelize() and textFile().
 */
object Demo06Partition {
  def main(args: Array[String]): Unit = {

    // spark.default.parallelism controls the default partition count for
    // parallelize() when no explicit count is passed
    val conf = new SparkConf()
      .setMaster("local[6]")
      .setAppName(this.getClass.getSimpleName)
      .set("spark.default.parallelism", "4")
    val sc = SparkContext.getOrCreate(conf)

    // an explicit partition count takes precedence: prints 3
    println(sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8), 3).getNumPartitions)
    // no explicit count, so spark.default.parallelism applies: prints 4
    println(sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8)).getNumPartitions)
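
    // Optional check: glom() gathers each partition into an array, which makes
    // it easy to see how the eight elements were distributed across the three
    // partitions.
    sc.parallelize(List(1, 2, 3, 4, 5, 6, 7, 8), 3)
      .glom()
      .collect()
      .zipWithIndex
      .foreach { case (part, i) => println(s"partition $i: ${part.mkString(",")}") }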

    // textFile defaults to defaultMinPartitions = min(defaultParallelism, 2);
    // the actual partition count also depends on the file's input splits
    val rdd = sc.textFile("data/words")
    println(rdd.getNumPartitions) // equivalent to rdd.partitions.length
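
    // Illustrative sketch, assuming the same data/words path is readable:
    // textFile also accepts an explicit minPartitions argument, which is a
    // lower bound on the number of splits, not an exact partition count.
    println(sc.textFile("data/words", 8).getNumPartitions) // at least 8 for a splittable file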


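    // release the SparkContext when the demo is done
    sc.stop()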
  }

}
