package cn.itcast.up.model.tools

import cn.itcast.up.model.bean.HBaseMeta
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.sources.{BaseRelation, TableScan}
import org.apache.spark.sql.types.{DataType, StringType, StructField, StructType}

/**
  * 当前类是我们自定义数据源读取的真正实现类
  */
/**
  * Concrete implementation of a custom Spark SQL data source that scans an
  * HBase table described by the supplied [[HBaseMeta]].
  *
  * Every selected column is exposed as a StringType field; raw HBase cell
  * bytes are decoded as UTF-8 strings, and missing cells surface as null.
  *
  * @param context the SQLContext this relation is bound to
  * @param meta    connection and projection metadata (ZooKeeper quorum/port,
  *                table name, column family, comma-separated field list)
  */
class ReadableRelation(context: SQLContext, meta: HBaseMeta) extends BaseRelation with TableScan with Serializable {

  // Column names requested by the user, parsed once from the comma-separated
  // list in the metadata (e.g. "id,job"). Trimmed so "id, job" also works.
  private val fieldNames: Array[String] = meta.selectFields.split(",").map(_.trim)

  /**
    * The SQLContext this relation was created with.
    * @return the active SQLContext
    */
  override def sqlContext: SQLContext = context

  /**
    * Schema of the produced rows: one StringType column per selected field,
    * in the order the fields were listed in the metadata.
    * @return the StructType describing the result rows
    */
  override def schema: StructType =
    StructType(fieldNames.map(name => StructField(name, StringType)))

  /**
    * Scans the HBase table and converts each [[Result]] into a [[Row]] whose
    * values follow the order of the selected fields.
    * @return an RDD of rows matching [[schema]]
    */
  override def buildScan(): RDD[Row] = {
    // HBase / ZooKeeper connection configuration.
    val conf: Configuration = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.property.clientPort", meta.zkPort)
    conf.set("hbase.zookeeper.quorum", meta.zkHosts)
    // NOTE(review): znode parent is hard-coded for an unsecured cluster
    // layout; make this configurable via HBaseMeta if other deployments
    // (e.g. secure "/hbase") are needed.
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set(TableInputFormat.INPUT_TABLE, meta.hbaseTable)

    // Read the raw (rowkey, Result) pairs through the Hadoop input format.
    val source: RDD[(ImmutableBytesWritable, Result)] = context.sparkContext.newAPIHadoopRDD(
      conf,
      classOf[TableInputFormat],
      classOf[ImmutableBytesWritable],
      classOf[Result]
    )

    // Hoist loop-invariant work out of the per-record closure: the original
    // re-split selectFields and re-encoded the family/qualifier names for
    // every row. Also use Bytes.toBytes (always UTF-8, HBase's own encoding)
    // instead of String.getBytes, which depends on the platform default
    // charset. Capturing plain local vals keeps the closure small.
    val familyBytes: Array[Byte] = Bytes.toBytes(meta.family)
    val columnBytes: Array[Array[Byte]] = fieldNames.map(Bytes.toBytes)

    source.map { case (_, result) =>
      // Pull each selected column's value from the Result; a missing cell
      // yields null (Bytes.toString(null) == null), matching the original.
      val values: Seq[String] = columnBytes.toSeq.map { qualifier =>
        Bytes.toString(result.getValue(familyBytes, qualifier))
      }
      Row.fromSeq(values)
    }
  }
}
