package com.chenjj.bigdata.spark.scala

import org.apache.spark.{SparkConf, SparkContext}

/**
 * Minimal Spark demo: shows how `parallelize` distributes a local collection
 * and how the partition count differs between the default and an explicit value.
 */
object HelloSpark {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("WordCountLocal")
      .setMaster("local")
      // Override the executor-memory sanity check for tiny local test runs.
      .set("spark.testing.memory", "1024000000")

    val data = Array(1, 2, 3, 4, 5)

    val sc = new SparkContext(conf)
    try {
      // Default parallelism: with master "local" this is 1 partition.
      // NOTE: the original code printed `partitioner.size` — but `partitioner`
      // is an Option[Partitioner] that is always None for parallelized RDDs,
      // so it printed 0 both times. `partitions.length` is the partition count.
      val distData = sc.parallelize(data)
      println(distData.partitions.length)

      // Explicitly request 3 partitions.
      val distData1 = sc.parallelize(data, 3)
      println(distData1.partitions.length)
    } finally {
      // Always release the SparkContext, even if an action above throws.
      sc.stop()
    }
  }
}
