package com.zhang.spark_1.spark_core.partition

import org.apache.spark.rdd.RDD
import org.apache.spark.{Partitioner, SparkConf, SparkContext}

/**
 * Demonstrates a custom RDD partitioner: pair-RDD keys are routed to
 * fixed partitions by value and the result is saved as text files.
 *
 * @author zhang
 * @date 2021/12/8 18:37
 */
object Spark01_Partition {

  /**
   * Entry point: builds a small pair RDD, repartitions it with
   * [[MyPartitioner]], and writes each partition out as a text file.
   *
   * @param args optional; args(0) overrides the output directory
   *             (defaults to "o2")
   */
  def main(args: Array[String]): Unit = {
    // Acquire the Spark connection (local mode, all available cores).
    val conf: SparkConf = new SparkConf().setMaster("local[*]").setAppName("operator")
    val sc: SparkContext = new SparkContext(conf)

    val rdd = sc.makeRDD(List(
      ("nba", 33333333),
      ("cba", 11111),
      ("asx", 111111),
      ("asx", 66666)
    ))

    // Redistribute records by key using the custom partitioner below.
    val par: RDD[(String, Int)] = rdd.partitionBy(new MyPartitioner)

    // Output directory: first CLI argument if supplied, otherwise "o2".
    // NOTE: saveAsTextFile throws if the target directory already exists.
    par.saveAsTextFile(args.headOption.getOrElse("o2"))

    sc.stop()
  }

  /**
   * Custom partitioner (extends Spark's [[Partitioner]]): routes key
   * "nba" to partition 0, "cba" to partition 1, and everything else
   * to partition 2.
   *
   * Overrides `equals`/`hashCode` as the Partitioner contract advises,
   * so Spark can recognize two instances as equivalent and avoid an
   * unnecessary shuffle when RDDs are already co-partitioned.
   */
  class MyPartitioner extends Partitioner {
    override def numPartitions: Int = 3

    override def getPartition(key: Any): Int = key match {
      case "nba" => 0
      case "cba" => 1
      case _     => 2
    }

    // Every MyPartitioner instance partitions identically, so any two
    // instances are interchangeable.
    override def equals(other: Any): Boolean = other match {
      case _: MyPartitioner => true
      case _                => false
    }

    override def hashCode(): Int = numPartitions
  }
}
