package com.gy.spark.core

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

object TestPartitionerMain {

  /**
   * Demonstrates a custom [[EditPartitioner]] on a small key/value RDD.
   *
   * Builds (word, wordLength) pairs, repartitions them with an
   * EditPartitioner of 3 partitions, prints each partition's contents,
   * then writes a reduced view to "data2" and the raw pairs to "data1".
   */
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      // getSimpleName on a Scala object's class ends with a synthetic '$';
      // strip it so the Spark UI shows a clean app name.
      .setAppName(TestPartitionerMain.getClass.getSimpleName.stripSuffix("$"))
      .setMaster("local[*]")
    val sc: SparkContext = new SparkContext(conf)

    val rdd1: RDD[String] = sc.makeRDD(Array("hadoop", "hdfs", "mapReduce", "hbase", "spark", "hdfs"))
    val kvRdd: RDD[(String, Int)] = rdd1.map(x => (x, x.length))

    val rt: RDD[(String, Int)] = kvRdd.partitionBy(new EditPartitioner(3))

    // Mark rt for caching BEFORE the first action so every action below
    // (foreachPartition, reduceByKey, saveAsTextFile) reuses the cached
    // partitions instead of recomputing the shuffle. In the original code
    // cache() ran after foreachPartition, wasting the first computation.
    rt.cache()

    rt.foreachPartition(part => part.foreach(println))

    // Reduce using the same partitioning scheme, then persist the result.
    rt.reduceByKey(new EditPartitioner(3), _ + _).saveAsTextFile("data2")

    rt.saveAsTextFile("data1")

    // Side-effecting 0-arity method: call with parentheses.
    sc.stop()
  }
}

/**
 * Routes keys to partitions by the length of their string form.
 *
 * Lengths 4, 5 and 6 map to slots 0, 1 and 2 respectively; any other
 * length (and a null key) falls back to slot 0. The slot is taken modulo
 * the partition count so the returned index is always within
 * [0, numPartitions), which Spark requires, even when `num` < 3.
 *
 * @param num total number of partitions; must be positive
 */
class EditPartitioner(num: Int) extends Partitioner {
  require(num > 0, s"Number of partitions ($num) must be positive.")

  override def numPartitions: Int = num

  override def getPartition(key: Any): Int = {
    // Null keys go to partition 0, mirroring Spark's HashPartitioner;
    // the original code would have thrown NPE on key.toString.
    if (key == null) {
      0
    } else {
      val slot = key.toString.length match {
        case 4 => 0
        case 5 => 1
        case 6 => 2
        case _ => 0
      }
      // Guard: keep the index inside [0, num) for any partition count.
      slot % num
    }
  }

  // equals/hashCode let Spark recognize two EditPartitioner instances with
  // the same partition count as equivalent, avoiding redundant shuffles.
  override def equals(other: Any): Boolean = other match {
    case p: EditPartitioner => p.numPartitions == num
    case _                  => false
  }

  override def hashCode(): Int = num
}