package com.spark.mooc.ch5_rdd.part01_RDDBasics

import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
 * @description: 自定义分区
 * @time: 2020/11/27 14:08
 * @author: lhy
 */
/**
 * Custom partitioner that routes each record by the last digit(s) of its key.
 *
 * @param numParts number of partitions this partitioner produces
 */
class MyPartitioner(numParts: Int) extends Partitioner {
    // Total number of partitions exposed to Spark.
    override def numPartitions: Int = numParts

    /**
     * Maps a key to a partition id in [0, numParts).
     *
     * Bug fix: the original hard-coded `% 10`, which returns ids >= numPartitions
     * whenever numParts < 10 (Spark rejects out-of-range partition ids).
     * We also normalize negative remainders so negative integer keys stay valid.
     * NOTE(review): assumes keys stringify to parseable integers — non-numeric
     * keys will throw NumberFormatException, same as the original.
     */
    override def getPartition(key: Any): Int = {
        val mod = key.toString.toInt % numParts
        if (mod < 0) mod + numParts else mod
    }

    // Partitioner equality lets Spark skip a shuffle when data is already
    // partitioned the same way (standard Partitioner contract).
    override def equals(other: Any): Boolean = other match {
        case p: MyPartitioner => p.numPartitions == numPartitions
        case _ => false
    }

    override def hashCode: Int = numPartitions
}
/**
 * Demo driver: repartitions a small RDD with [[MyPartitioner]] and writes
 * one output file per partition.
 */
object TestPartitioner {
    def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("partitioner").setMaster("local")
        val sc = new SparkContext(conf)
        try {
            // Simulate data spread across 5 initial partitions.
            val data = sc.parallelize(1 to 5, 5)
            // Repartition into 10 partitions by the key's last digit,
            // then write each partition to its own file.
            data.map((_, 1))
              .partitionBy(new MyPartitioner(10))
              .map(_._1)
              .saveAsTextFile("output/rdd/partitioner")
        } finally {
            // Fix: the original never stopped the SparkContext, leaking the
            // local Spark runtime's resources on exit.
            sc.stop()
        }
    }
}
