package org.spark.api.utils

import org.apache.hadoop.hbase.client.{Get, Result, Table}

/**
 * Helper functions for reading and decoding HBase [[Result]] cell values.
 *
 * All methods are null-tolerant: a `null` argument or an absent cell yields `None`
 * instead of throwing.
 */
object ResultUtil {

  /**
   * Decodes the cell at (family, column) of an already-fetched [[Result]].
   *
   * @param reader converts the raw cell bytes into a `T`
   * @return `Some(decoded value)`, or `None` when any argument is null or the cell is absent
   */
  def getValue[T](result: Result, family: Array[Byte], column: Array[Byte], reader: Array[Byte] => T): Option[T] =
    getBytes(result, family, column).map(reader)

  /**
   * Extracts the raw bytes of the cell at (family, column).
   *
   * @return `Some(bytes)`, or `None` when any argument is null or the cell is absent
   */
  def getBytes(result: Result, family: Array[Byte], column: Array[Byte]): Option[Array[Byte]] =
    for {
      res <- Option(result)
      fam <- Option(family)
      col <- Option(column)
      raw <- Option(res.getValue(fam, col)) // getValue returns null for an absent cell
    } yield raw

  /**
   * Fetches a single row from `table` and decodes the cell at (family, column).
   *
   * @param reader converts the raw cell bytes into a `T`
   * @return `Some(decoded value)`, or `None` when any argument is null or the cell is absent
   */
  def getValue[T](table: Table, rowKey: Array[Byte], family: Array[Byte], column: Array[Byte], reader: Array[Byte] => T): Option[T] =
    getBytes(table, rowKey, family, column).map(reader)

  /**
   * Fetches a single row from `table` and extracts the raw bytes of the cell
   * at (family, column). The Get is narrowed to that one column to minimise I/O.
   *
   * @return `Some(bytes)`, or `None` when any argument is null or the cell is absent
   */
  def getBytes(table: Table, rowKey: Array[Byte], family: Array[Byte], column: Array[Byte]): Option[Array[Byte]] =
    for {
      tbl <- Option(table)
      key <- Option(rowKey)
      fam <- Option(family)
      col <- Option(column)
      raw <- {
        val request = new Get(key)
        request.addColumn(fam, col)
        getBytes(tbl.get(request), fam, col)
      }
    } yield raw
}
