package com.kv

import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}

/**
 * Demo: repartitioning a key-value RDD with [[HashPartitioner]].
 *
 * Builds a 4-partition RDD of (Int, String) pairs, hash-repartitions it
 * into 3 partitions, and prints the resulting partition count (3).
 */
object KVDemo01 {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("kvdemo01")
      .setMaster("local[2]") // local mode with 2 worker threads
    val sc = new SparkContext(conf)
    try {
      val data = Array((1, "a"), (2, "b"), (3, "c"))
      // Start with 4 partitions (more partitions than elements is fine).
      val rdd1 = sc.makeRDD(data, 4)
      // partitionBy shuffles the pairs into 3 partitions keyed by hash(key) % 3.
      val rdd2 = rdd1.partitionBy(new HashPartitioner(3))
      println(rdd2.partitions.length) // expected output: 3
    } finally {
      // Always release the SparkContext so local threads and resources shut down.
      sc.stop()
    }
  }
}
