package sparkcore.day7.lesson01

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2018/5/3.
  */
/**
  * Demo entry point: builds a local SparkContext, loads a text file as an RDD,
  * and inspects its partitioner.
  *
  * Note: a plain `textFile` RDD normally has no partitioner (`None`) — a
  * partitioner only appears after a shuffle with a key-based operation
  * (e.g. `partitionBy` with a `HashPartitioner`).
  */
object Lesson01 {

  /**
    * @param args optional; args(0) may override the input path
    *             (defaults to the original hard-coded HDFS location,
    *             so existing invocations behave identically).
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("test")
    val sc = new SparkContext(conf)
    try {
      // Backward-compatible generalization: allow the path as a CLI argument.
      val inputPath = args.headOption.getOrElse("hdfs://hadoop1:9000/hello.txt")
      val rdd = sc.textFile(inputPath)

      // Expected to be None here: textFile produces an un-partitioned RDD.
      // A HashPartitioner would only show up after e.g. rdd.map(...).partitionBy(...).
      println(s"partitioner = ${rdd.partitioner}")
    } finally {
      // Always release the SparkContext, even if the job above fails;
      // leaking it keeps the local scheduler and its threads alive.
      sc.stop()
    }
  }

}
