import org.apache.spark.{Partitioner,SparkContext,SparkConf}
/** Two-way partitioner over String keys.
  *
  * Keys of length <= 5 go to partition 0, longer keys to partition 1.
  * Any non-String key is rejected with an IllegalArgumentException.
  */
class MyPa extends Partitioner {
  override def numPartitions: Int = 2

  override def getPartition(key: Any): Int =
    key match {
      case str: String =>
        if (str.length <= 5) 0 else 1
      case _ =>
        throw new IllegalArgumentException("Expected a string")
    }

  // Spark decides whether a shuffle is needed by comparing partitioners.
  // Without these overrides, two MyPa instances are never equal, so data
  // already partitioned by one MyPa would be re-shuffled by another.
  override def equals(other: Any): Boolean = other.isInstanceOf[MyPa]
  override def hashCode(): Int = numPartitions
}
// Driver script: partitions a small set of fruit names by string length
// (via the custom MyPa partitioner) and prints the record count of each
// of the two resulting partitions.
val conf = new SparkConf()
val sc = new SparkContext(conf)

val rdd = sc.parallelize(
  Seq("apple", "banana", "orange", "pear", "watermelon", "grape", "pineapple"))

// Key each word by itself, route the pairs through the custom partitioner,
// and cache the shuffled result so downstream actions reuse it.
val data = rdd.map(s => (s, 1)).partitionBy(new MyPa).persist()

// Emit exactly one element per partition: the number of records in it.
val result = data.mapPartitions(iter => Iterator(iter.size))
result.collect().foreach(println)

// Fix: the original script never stopped the SparkContext, leaking the
// application's executors and driver-side resources on exit.
sc.stop()

