package chapter03

import org.apache.spark.{SparkConf, SparkContext}

object Test20_partitionBy {
  /**
   * Demonstrates custom partitioning of a pair RDD.
   *
   * Builds a small (Int, String) RDD with two partitions, prints each
   * partition's contents, then repartitions it with [[MyPartition]]
   * (even keys -> partition 0, odd keys -> partition 1) and prints the
   * partition contents again so the redistribution is visible.
   *
   * NOTE(review): `foreachPartition(println)` prints on the executors;
   * with `local[*]` that is the driver console, which is what this demo
   * relies on.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("partitionBy")
    val sc = new SparkContext(conf)
    try {
      // Two initial partitions so the effect of repartitioning is visible.
      val pairs = sc.makeRDD(List((1, "abc"), (2, "bcd"), (3, "bcd"), (4, "ehf")), 2)
      // Print the contents of each partition before repartitioning.
      pairs.foreachPartition(iter => println(iter.toList))
      // Repartition by key parity and print each partition again.
      pairs.partitionBy(new MyPartition(2))
        .foreachPartition(iter => println(iter.toList))
    } finally {
      sc.stop() // release Spark resources even if an action above fails
    }
  }
}
import org.apache.spark.Partitioner
import org.apache.spark.HashPartitioner
/**
 * Partitioner that routes keys by integer parity: even Int keys go to
 * partition 0, odd Int keys to partition 1. Keys that are not Int fall
 * back to partition 0 instead of throwing a MatchError (the original
 * match was non-exhaustive and would crash the job on any non-Int key).
 *
 * NOTE(review): only partitions 0 and 1 are ever used; with
 * `numPart > 2` the remaining partitions stay empty, and `numPart < 2`
 * would violate the Partitioner contract (getPartition must return a
 * value in [0, numPartitions)) — callers should pass numPart >= 2.
 *
 * @param numPart the number of partitions reported to Spark
 */
class MyPartition(numPart: Int) extends Partitioner {
  override def numPartitions: Int = numPart

  /** Target partition for `key`; always within [0, 2). */
  override def getPartition(key: Any): Int = {
    key match {
      case i: Int if i % 2 == 0 => 0
      case _: Int               => 1
      case _                    => 0 // non-Int keys: avoid MatchError, default bucket
    }
  }
}
