import org.apache.spark.{Partition, Partitioner, SparkConf, SparkContext}

/**
 * Custom Spark partitioner that assigns keys to partitions by the numeric
 * value of their string form, modulo the configured partition count.
 *
 * @param numParts number of partitions to produce (must be positive)
 */
class NewPartition(numParts: Int) extends Partitioner {

  /** Number of partitions this partitioner produces. */
  override def numPartitions: Int = numParts

  /**
   * Maps a key to a partition index in `[0, numPartitions)`.
   *
   * Fix: the original used a hard-coded `% 10`, which breaks the
   * Partitioner contract whenever `numParts != 10` (out-of-range or
   * skewed indices). Also, Scala's `%` keeps the dividend's sign, so a
   * negative key would produce a negative index — shift it into range.
   *
   * NOTE(review): `key.toString.toInt` will throw for non-numeric keys;
   * presumably keys are always integers here — confirm against callers.
   */
  override def getPartition(key: Any): Int = {
    val raw = key.toString.toInt % numPartitions
    if (raw < 0) raw + numPartitions else raw
  }

}

/**
 * Small driver program: creates a local SparkContext, materializes a
 * 10-element RDD across 5 partitions, and prints it.
 */
object Test {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("TestPartitioner").setMaster("local")
    // Raise the testing memory floor so a local context can start.
    conf.set("spark.testing.memory", "2147480000")
    val sc = new SparkContext(conf)
    try {
      val data = sc.parallelize(1 to 10, 5)
      // Fix: use foreach for side effects — `map(println)` built and
      // discarded an Array[Unit].
      data.collect().foreach(println)
      data.take(3).foreach(println)
      //data.foreach(println)

      //data.map((_,1)).partitionBy(new NewPartition(10)).map(_._1).saveAsTextFile("newPartOut")
    } finally {
      // Fix: the context was never released; always stop it on exit.
      sc.stop()
    }
  }

}
