package cn.itcast.model.util

import cn.itcast.model.bean.HBaseMeta
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

/**
 * A Spark SQL relation that performs a full scan (`TableScan`) over one HBase table.
 *
 * Every selected column is exposed as a nullable `StringType` field; values are
 * decoded from HBase cell bytes with `Bytes.toString` (UTF-8). Columns absent in
 * a row come back as `null`, which `StringType` tolerates.
 *
 * @param context the active `SQLContext` this relation is bound to
 * @param meta    connection/scan description: ZooKeeper quorum + port, table name,
 *                column family, and a comma-separated list of qualifiers to select
 */
class HBaseReadableRelation(context: SQLContext, meta: HBaseMeta)
extends BaseRelation with TableScan with Serializable {
  override def sqlContext: SQLContext = context

  /**
   * Builds the schema from `meta.selectFields` (comma-separated qualifier names).
   * All fields are `StringType` because HBase stores raw bytes and this relation
   * decodes everything as UTF-8 text.
   */
  override def schema: StructType = {
    val fields: Array[StructField] = meta.selectFields.split(",")
      .map(fieldName => StructField(fieldName, StringType))
    StructType(fields)
  }

  /**
   * Scans the whole table and converts each HBase `Result` into a `Row` whose
   * columns follow the order of `meta.selectFields` (matching `schema`).
   */
  override def buildScan(): RDD[Row] = {
    val conf = new Configuration()
    conf.set("hbase.zookeeper.property.clientPort", meta.zkPort)
    conf.set("hbase.zookeeper.quorum", meta.zkHosts)
    // NOTE(review): hard-coded for HDP-style "unsecure" clusters; on a vanilla
    // HBase install the parent znode is usually "/hbase" — confirm per deployment.
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set(TableInputFormat.INPUT_TABLE, meta.hbaseTable)
    val hadoopRDD: RDD[(ImmutableBytesWritable, Result)] = context.sparkContext
      .newAPIHadoopRDD(
        conf,
        classOf[TableInputFormat],
        classOf[ImmutableBytesWritable],
        classOf[Result]
      )

    // Hoist loop-invariant work out of the per-record closure: split the field
    // list once and pre-encode family/qualifier names once. Use Bytes.toBytes
    // (always UTF-8) rather than String.getBytes() (platform default charset),
    // so lookups match how HBase itself encodes names.
    val familyBytes: Array[Byte] = Bytes.toBytes(meta.family)
    val qualifierBytes: Array[Array[Byte]] =
      meta.selectFields.split(",").map(Bytes.toBytes)

    val resultRDD: RDD[Result] = hadoopRDD.map(_._2)
    resultRDD.map { result =>
      // getValue returns null for a missing cell; Bytes.toString(null) yields
      // null, which maps to a SQL NULL in the nullable StringType column.
      val values: Array[String] = qualifierBytes.map { qualifier =>
        Bytes.toString(result.getValue(familyBytes, qualifier))
      }
      Row.fromSeq(values.toSeq)
    }
  }
}
