package com.leal.entity

import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.Partitioner

/**
 * @Classname bigdata
 * @Description HFilePartitioner - custom Spark partitioner that maps keys to HBase region split ranges
 *              (with an extra hash-based sub-split factor per region)
 * @Date 2023/3/7 14:10
 * @Created by leal
 */
/**
 * Custom Spark partitioner that routes each record to the HBase region whose
 * split-key range contains it, further sub-dividing every region into
 * `fraction` hash buckets so large regions can be written in parallel.
 *
 * @param splits       region start keys, sorted ascending; `splits(0)` is the
 *                     start key of the first region (callers must pass a
 *                     non-empty, sorted array — not validated here)
 * @param numberRegion requested sub-splits per region; clamped to [1, 128]
 */
class HFilePartitioner(splits: Array[Array[Byte]], numberRegion: Int) extends Partitioner {
  // Clamp the per-region sub-split factor into [1, 128].
  private val fraction = 1 max numberRegion min 128

  override def numPartitions: Int = splits.length * fraction

  /**
   * Partition = (index of the region containing the key) * fraction + hash bucket.
   *
   * @param key row key; supported types are converted via HBase `Bytes.toBytes`,
   *            any other type falls back to its `toString` bytes (previously a
   *            `MatchError`)
   */
  override def getPartition(key: Any): Int = {
    // Serialize the key once — it is loop-invariant (the original recomputed
    // it on every comparison inside the loop).
    val keyBytes = key match {
      case s: String => Bytes.toBytes(s)
      case l: Long   => Bytes.toBytes(l)
      case i: Int    => Bytes.toBytes(i)
      case d: Double => Bytes.toBytes(d)
      case f: Float  => Bytes.toBytes(f)
      case other     => Bytes.toBytes(other.toString) // fallback: avoid MatchError on unlisted types
    }

    // Stable hash bucket in [0, fraction) for the sub-split within a region.
    val bucket = (key.hashCode() & Int.MaxValue) % fraction

    // First split boundary (from index 1) strictly greater than the key; the
    // key belongs to the region just before it. If none is greater, the key
    // falls into the last region. Replaces the loop-with-return.
    val boundary = splits.indexWhere(split => Bytes.compareTo(keyBytes, split) < 0, 1)
    val region = if (boundary < 0) splits.length - 1 else boundary - 1

    region * fraction + bucket
  }
}
