package io.sqrtqiezi.spark.partitioner

import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
 * Custom Spark partitioner that buckets integer-like keys by hundreds:
 * key k is routed to partition (k / 100), wrapped into the valid range.
 *
 * Note: keys are converted via toString.toInt, so non-numeric keys will
 * throw NumberFormatException at shuffle time — callers must supply
 * integer-convertible keys.
 *
 * @param partitions total number of partitions; must be positive
 */
class MyPartitioner(partitions: Int) extends Partitioner {
  require(partitions > 0, s"Number of partitions must be positive, got $partitions")

  override def numPartitions: Int = partitions

  /**
   * Maps a key to a partition index in [0, numPartitions).
   *
   * Fix: the previous implementation returned k / 100 directly, which is
   * out of range for keys >= partitions * 100 and negative for negative
   * keys — Spark's Partitioner contract requires 0 <= result < numPartitions.
   */
  override def getPartition(key: Any): Int = {
    val k = key.toString.toInt
    // floorMod keeps the result non-negative even when k is negative.
    Math.floorMod(k / 100, numPartitions)
  }

  // Proper equals/hashCode let Spark recognize two RDDs partitioned by an
  // equivalent MyPartitioner as co-partitioned and skip a redundant shuffle.
  override def equals(other: Any): Boolean = other match {
    case p: MyPartitioner => p.numPartitions == numPartitions
    case _                => false
  }

  override def hashCode(): Int = numPartitions
}

/**
 * Demo driver: builds an RDD of random (key, 1) pairs, prints the contents
 * of each partition under Spark's default partitioning, then repartitions
 * with MyPartitioner(10) and prints the partition contents again.
 */
object UserDefinedPartitioner {
  def main(args: Array[String]): Unit = {
    // `.init` drops the trailing '$' that getCanonicalName appends for objects.
    val sparkConf = new SparkConf()
      .setAppName(this.getClass.getCanonicalName.init)
      .setMaster("local[*]")
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")

    // 100 random keys in [0, 1000), paired with a count of 1.
    val rng = scala.util.Random
    val samples = (1 to 100).map(_ => rng.nextInt(1000))
    val pairs = sc.makeRDD(samples).map(value => (value, 1))

    // Show how the default partitioner spread the data.
    for (partition <- pairs.glom.collect) {
      println(partition.toBuffer)
    }

    println("******************************************")

    // Repartition by hundreds bucket and show the new layout.
    val repartitioned = pairs.partitionBy(new MyPartitioner(10))
    for (partition <- repartitioned.glom.collect) {
      println(partition.toBuffer)
    }

    sc.stop()
  }
}
