package com.catmiao.spark.rdd.part

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
 * @title: Spark01_RDD_part
 * @projectName spark_study
 * @description: Demonstrates repartitioning a pair RDD with a custom Partitioner.
 * @author ChengMiao
 * @date 2024/2/28 23:20
 */
object Spark01_RDD_part {

  /**
   * Entry point: builds a pair RDD of (league, id) records, repartitions it
   * with [[MyPartitioner]], and writes each partition out as a text file.
   */
  def main(args: Array[String]): Unit = {
    val sparkCon = new SparkConf().setMaster("local[*]").setAppName("rdd")

    val sparkContext = new SparkContext(sparkCon)

    // Source data spread over 3 initial partitions (unrelated to the
    // partitioning MyPartitioner will impose below).
    val rdd = sparkContext.makeRDD(List(
      ("nba", "123452345"),
      ("cba", "qwer1234"),
      ("wnba", "25365436"),
      ("nba", "ertyf123")
    ), 3)

    // Re-distribute records by key according to the custom partitioner.
    val partRDD = rdd.partitionBy(new MyPartitioner)

    // NOTE(review): saveAsTextFile throws if "output3" already exists —
    // delete the directory before re-running.
    partRDD.saveAsTextFile("output3")

    sparkContext.stop()
  }

  /**
   * Custom partitioner (extends [[Partitioner]]) that routes records by key:
   * "nba" -> partition 0, "cba" -> partition 1, everything else -> partition 2.
   */
  class MyPartitioner extends Partitioner {

    // Total number of partitions this partitioner produces.
    override def numPartitions: Int = 3

    // Returns the 0-based partition index for the given record key.
    override def getPartition(key: Any): Int = {
      key match {
        case "nba" => 0
        case "cba" => 1
        case _     => 2
      }
    }

    // The Partitioner contract recommends overriding equals/hashCode so Spark
    // can recognize two instances as the same partitioning and avoid an
    // unnecessary shuffle when data is already partitioned this way.
    override def equals(other: Any): Boolean = other match {
      case _: MyPartitioner => true
      case _                => false
    }

    override def hashCode: Int = numPartitions
  }
}
