package com.xzx.spark.core.transform

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
 * Demonstrates `partitionBy` on a key-value RDD using a custom Spark `Partitioner`.
 *
 * @author xinzhixuan
 * @version 1.0
 * @date 2021-06-26 8:39 PM
 */
/**
 * Repartitions a small (Int, String) pair RDD with [[MyPartitioner]]
 * (even keys -> partition 0, everything else -> partition 1) and writes
 * the repartitioned data to the "output" directory.
 */
object Spark015_KeyValue_PartitionBy {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("Spark015_KeyValue_PartitionBy")
    val sc = new SparkContext(sparkConf)

    val pairs = sc.makeRDD(List((1, "aaa"), (2, "bbb"), (3, "ccc")))
    // Built-in alternative: pairs.partitionBy(new HashPartitioner(2))
    val repartitioned: RDD[(Int, String)] = pairs.partitionBy(new MyPartitioner(2))
    repartitioned.saveAsTextFile("output")

    sc.stop()
  }
}

/**
 * Parity-based partitioner: even `Int` keys go to partition 0; odd `Int`
 * keys go to partition 1; non-`Int` keys fall back to partition 0.
 *
 * @param partitions total number of partitions; must be at least 2 because
 *                   `getPartition` may return index 1
 */
class MyPartitioner(partitions: Int) extends Partitioner {
  // Fail fast at construction: with fewer than 2 partitions, getPartition
  // could return an out-of-range index that only blows up later inside Spark.
  require(partitions >= 2, s"Number of partitions ($partitions) must be at least 2.")

  override def numPartitions: Int = partitions

  override def getPartition(key: Any): Int = key match {
    case a: Int => if (a % 2 == 0) 0 else 1
    case _      => 0
  }

  // Overriding equals/hashCode (same convention as HashPartitioner) lets Spark
  // detect that two RDDs are already co-partitioned and skip needless shuffles.
  override def equals(other: Any): Boolean = other match {
    case p: MyPartitioner => p.numPartitions == numPartitions
    case _                => false
  }

  override def hashCode: Int = numPartitions
}
