package cn.itcast.model.utils

import cn.itcast.model.bean.HBaseMeta
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

/**
  * A Spark SQL relation that scans an HBase table described by [[HBaseMeta]]
  * and exposes the selected columns as an all-string DataFrame.
  *
  * @param context the SQLContext used to create the underlying RDD
  * @param meta    HBase connection/table metadata (ZK quorum and port, table
  *                name, column family, and a comma-separated field list)
  */
class HBaseReadableRelation(context: SQLContext, meta: HBaseMeta) extends BaseRelation with TableScan with Serializable{
  override def sqlContext: SQLContext = context

  // Field names from meta.selectFields, split on "," and trimmed so that a
  // list like "id, job" does not yield a column literally named " job".
  private def fieldNames: Array[String] = meta.selectFields.split(",").map(_.trim)

  /**
    * Schema of the rows produced by [[buildScan]].
    *
    * HBase is schema-less, so the schema must be defined manually: one
    * nullable StringType column per selected field (cell values are decoded
    * as strings; a missing cell yields null).
    *
    * @return StructType with one StringType field per entry in selectFields
    */
  override def schema: StructType = {
    // Each field name becomes a StructField(name, StringType).
    StructType(fieldNames.map(StructField(_, StringType)))
  }

  /**
    * Loads the HBase table configured in [[HBaseMeta]] as an RDD[Row],
    * one Row per HBase record, columns ordered as in [[schema]].
    *
    * @return RDD of rows whose values are the string-decoded cell contents
    */
  override def buildScan(): RDD[Row] ={
    // HBase/ZooKeeper connection configuration for the input format.
    val conf = new Configuration()
    conf.set("hbase.zookeeper.property.clientPort", meta.zkPort)
    conf.set("hbase.zookeeper.quorum", meta.zkHosts)
    // NOTE(review): hard-coded znode parent matches an HDP-style
    // "/hbase-unsecure" layout — confirm for other cluster distributions.
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set(TableInputFormat.INPUT_TABLE, meta.hbaseTable)

    // Load the raw (rowkey, Result) pairs via the Hadoop InputFormat API.
    val hadoopRDD: RDD[(ImmutableBytesWritable, Result)] = context.sparkContext
      .newAPIHadoopRDD(
        conf,
        classOf[TableInputFormat],
        classOf[ImmutableBytesWritable],
        classOf[Result]
      )

    // Convert each (ImmutableBytesWritable, Result) pair into a Row.
    hadoopRDD.map { case (_, result) =>
      // Result holds all cells of one record; pull out each selected column.
      val values: Array[String] = fieldNames.map { fieldName =>
        // Use Bytes.toBytes (UTF-8) rather than String.getBytes, which
        // depends on the JVM's platform-default charset.
        val bytes: Array[Byte] =
          result.getValue(Bytes.toBytes(meta.family), Bytes.toBytes(fieldName))
        // Bytes.toString returns null for a missing cell, so absent
        // columns surface as null values in the Row.
        Bytes.toString(bytes)
      }
      Row.fromSeq(values.toSeq)
    }
  }
}
