package com.zck2.utils

import org.apache.spark.Partitioner

import scala.collection.mutable

/**
 * Custom Spark partitioner that assigns one partition per distinct key.
 *
 * Partition ids are allocated in the order keys first appear in `args`,
 * so the same `args` sequence always yields the same key -> partition mapping.
 *
 * @param args the keys to partition by; duplicates are ignored
 */
class UDFPartitioner (val args: Array[Long]) extends Partitioner{
  // key -> partition id; populated once at construction time
  val partitionMap: mutable.HashMap[Long, Int] = new mutable.HashMap[Long, Int]()

  // next partition id to assign
  var parId = 0

  // one partition per distinct key (first occurrence wins)
  for (arg <- args) {
    if (!partitionMap.contains(arg)) {
      partitionMap(arg) = parId
      parId += 1
    }
  }

  /** Number of partitions = number of distinct keys in `args`. */
  // FIX: `valuesIterator.length` walked the whole map on every call; `size` is O(1).
  override def numPartitions: Int = partitionMap.size

  /**
   * Partition id for `key`. The key is converted via `toString.toLong`,
   * so it must be a numeric value (NumberFormatException otherwise).
   */
  override def getPartition(key: Any): Int = {
    // FIX: previously returned -1 for unknown keys, which is an invalid
    // partition index (Spark requires 0 <= id < numPartitions and would
    // fail at shuffle time). Unknown keys now fall back to partition 0.
    partitionMap.getOrElse(key.toString.toLong, 0)
  }

  // Spark compares partitioners to decide whether an RDD is already
  // partitioned correctly and a shuffle can be skipped; without equals/
  // hashCode two identically-configured instances would never compare equal.
  override def equals(other: Any): Boolean = other match {
    case p: UDFPartitioner => p.partitionMap == partitionMap
    case _                 => false
  }

  override def hashCode(): Int = partitionMap.hashCode()
}
