package top.jolyoulu.core.rdd.part

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
 * @Author: JolyouLu
 * @Date: 2024/2/6 16:32
 * @Description
 */
object Spark01_RDD_part {
  def main(args: Array[String]): Unit = {
    val sparkConf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("RDD")
    val sc: SparkContext = new SparkContext(sparkConf)

    val rdd: RDD[(String, String)] = sc.makeRDD(List(
      ("一班", "张三"),
      ("二班", "李四"),
      ("三班", "王五"),
      ("一班", "赵六"),
    ))
    // Repartition the pair RDD with the custom partitioner below.
    val partRDD: RDD[(String, String)] = rdd.partitionBy(new MyClassPartitioner)
    // Write each partition out as a separate part file under "output".
    partRDD.saveAsTextFile("output")
    // Release the Spark context.
    sc.stop()
  }

  /**
   * Custom partitioner that maps each known class name to a fixed partition.
   *
   * Unknown keys fall back to partition 0 instead of throwing `MatchError`
   * (the original non-exhaustive match would crash the shuffle task on any
   * key other than the three listed class names).
   */
  class MyClassPartitioner extends Partitioner {
    // Total number of partitions exposed to Spark.
    override def numPartitions: Int = 3

    // Return the 0-based partition index for a key.
    override def getPartition(key: Any): Int = {
      key match {
        case "一班" => 0
        case "二班" => 1
        case "三班" => 2
        // Default bucket for any unexpected key — keeps the job from failing
        // with scala.MatchError during the shuffle.
        case _ => 0
      }
    }

    // Partitioners that compare equal let Spark skip redundant shuffles when
    // the data is already partitioned the same way. All instances of this
    // class partition identically, so they are interchangeable.
    override def equals(other: Any): Boolean = other.isInstanceOf[MyClassPartitioner]

    override def hashCode(): Int = classOf[MyClassPartitioner].hashCode()
  }
}
