import org.apache.spark.Partitioner
// Sample fruit names; distributed via the shell's SparkContext (`sc`).
val rdd = sc.parallelize(
  Seq("apple", "banana", "orange", "pear", "watermelon", "grape", "pineapple")
)

/** Partitions string keys into two partitions by length.
  *
  * Keys whose length is at most `lengthThreshold` go to partition 0; longer
  * keys go to partition 1. Non-string keys (including null) are rejected.
  *
  * @param lengthThreshold maximum string length routed to partition 0
  *                        (defaults to 5, preserving the original behavior)
  */
class CustomPartitioner(val lengthThreshold: Int = 5) extends Partitioner {
  override def numPartitions: Int = 2

  /** Routes a key to a partition index in [0, numPartitions).
    *
    * @throws IllegalArgumentException if the key is not a String
    */
  override def getPartition(key: Any): Int = {
    key match {
      case str: String =>
        if (str.length <= lengthThreshold) 0 else 1
      case _ => throw new IllegalArgumentException("Expected a string")
    }
  }

  // The Partitioner contract recommends overriding equals/hashCode: it lets
  // Spark recognize that two instances describe the same partitioning scheme
  // and skip unnecessary re-shuffles (e.g. on joins of co-partitioned RDDs).
  override def equals(other: Any): Boolean = other match {
    case that: CustomPartitioner => that.lengthThreshold == lengthThreshold
    case _                       => false
  }

  override def hashCode: Int = lengthThreshold
}

// Pair each word with a count of 1, redistribute by the custom length-based
// partitioner, and cache the result so later actions reuse the shuffle output.
val partitionedRDD = rdd.map(word => (word, 1)).partitionBy(new CustomPartitioner()).persist()

// One element per partition: the number of records that landed in it.
val resultRDD = partitionedRDD.mapPartitions(records => Iterator.single(records.size))
for (count <- resultRDD.collect()) println(count)
