package com.spark.util.utils

import org.apache.hadoop.hbase.client.{Result, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos
import org.apache.hadoop.hbase.util.{Base64, Bytes}

import scala.util.Random

object HBaseUtil {

  /**
   * Prefixes `rowKey` with a random partition index and a `|` separator,
   * spreading writes across `partitionNum` buckets (a common salting
   * technique to avoid HBase region hotspotting on sequential keys).
   *
   * @param rowKey       the original row key
   * @param partitionNum number of salt buckets; must be positive
   * @return a salted key of the form `"<n>|<rowKey>"` where `0 <= n < partitionNum`
   * @throws IllegalArgumentException if `partitionNum` is not positive
   */
  def getPartitionRowKey(rowKey: String, partitionNum: Int): String = {
    // Random.nextInt would throw anyway for non-positive bounds; fail early
    // with an explicit message instead.
    require(partitionNum > 0, s"partitionNum must be positive, got $partitionNum")
    val partition = Random.nextInt(partitionNum)
    s"$partition|$rowKey"
  }

  /**
   * Decodes a Base64 string (as produced by [[convertScanToString]]) back
   * into an HBase [[Scan]] via its protobuf representation.
   *
   * @param base64 Base64-encoded protobuf bytes of a Scan
   * @return the reconstructed Scan
   */
  def convertStringToScan(base64: String): Scan = {
    val decoded = Base64.decode(base64)
    val proto = ClientProtos.Scan.parseFrom(decoded)
    ProtobufUtil.toScan(proto)
  }

  /**
   * Serializes a [[Scan]] to a Base64 string via its protobuf representation,
   * suitable for passing through string-only configuration (e.g. the
   * `TableInputFormat.SCAN` property). Round-trips with [[convertStringToScan]].
   *
   * @param scan the Scan to serialize
   * @return Base64-encoded protobuf bytes of the Scan
   */
  def convertScanToString(scan: Scan): String = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }

  /**
   * Extracts the row key from a scan-result pair (the element type of an
   * HBase RDD) as a UTF-8 string.
   *
   * @param row (rowKeyBytes, result) pair
   * @return the row key decoded as a string
   */
  def getRowKey(row: (ImmutableBytesWritable, Result)): String = {
    Bytes.toString(row._1.get)
  }

  /**
   * Reads a single cell value from a scan-result pair as a UTF-8 string.
   *
   * NOTE(review): if the family/qualifier is absent, `getValue` returns null
   * and this presumably yields null rather than throwing — callers should
   * null-check; verify against the HBase `Bytes.toString` contract.
   *
   * @param row       (rowKeyBytes, result) pair
   * @param family    column family name
   * @param qualifier column qualifier name
   * @return the cell value decoded as a string
   */
  def getCell(row: (ImmutableBytesWritable, Result), family: String, qualifier: String): String = {
    Bytes.toString(row._2.getValue(Bytes.toBytes(family), Bytes.toBytes(qualifier)))
  }
}
