package cn.itcast.tags.spark.hbase

import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, DataSourceRegister, RelationProvider}
import org.apache.spark.sql.types.{StringType, StructField, StructType}


/**
 * 默认数据源提供Relation对象，分别为加载数据和保存数据提供Relation对象
 */
class DefaultSource extends RelationProvider with CreatableRelationProvider with DataSourceRegister {

  // Separator used to split the comma-delimited field list in the
  // "selectFields" option. (Name kept as-is for source compatibility,
  // although "SEPARATOR" is the intended spelling.)
  val SPERATOR: String = ","

  // Option key callers must set (via .option(...)) to list the columns to read.
  val HBASE_TABLE_SELECT_FIELDS: String = "selectFields"


  /**
   * Short name for this data source, so callers can use
   * `spark.read.format("hbase")` instead of the full class name.
   *
   * @return the alias "hbase"
   */
  override def shortName(): String = "hbase"


  /**
   * Creates the relation used when READING from this data source.
   * The returned [[HBaseRelation]] is expected to implement
   * `BaseRelation` and `TableScan`.
   *
   * @param sqlContext active SQLContext
   * @param params     data source options; must contain "selectFields",
   *                   a comma-separated list of column names to read
   * @return a relation scanning the requested columns (all typed as String)
   * @throws IllegalArgumentException if the "selectFields" option is missing
   */
  override def createRelation(sqlContext: SQLContext, params: Map[String, String]): BaseRelation = {

    // Fail fast with a clear message instead of a bare
    // NoSuchElementException("key not found") when the option is absent.
    val fieldSpec: String = params.getOrElse(
      HBASE_TABLE_SELECT_FIELDS,
      throw new IllegalArgumentException(
        s"Missing required option '$HBASE_TABLE_SELECT_FIELDS': " +
          "comma-separated list of columns to read"
      )
    )

    // Trim each name and drop empties so "a, b," yields fields "a" and "b",
    // not " b" and "".
    val fields: Array[String] = fieldSpec
      .split(SPERATOR)
      .map(_.trim)
      .filter(_.nonEmpty)

    // All HBase cell values are read back as strings.
    val userSchema: StructType = new StructType(
      fields.map(field => StructField(field, StringType, nullable = true))
    )

    new HBaseRelation(sqlContext, params, userSchema)
  }

  /**
   * Creates the relation used when WRITING a dataset to this data source,
   * and immediately inserts the data. The returned [[HBaseRelation]] is
   * expected to implement `BaseRelation` and `InsertableRelation`.
   *
   * Note: the save `mode` is currently ignored — data is always inserted
   * with overwrite semantics.
   *
   * @param sqlContext active SQLContext
   * @param mode       requested save mode (currently not honored)
   * @param params     data source options, as set via .option(...)
   * @param data       the dataset to persist
   * @return the relation that performed the insert
   */
  override def createRelation(sqlContext: SQLContext, mode: SaveMode, params: Map[String, String], data: DataFrame): BaseRelation = {
    // Schema is taken directly from the DataFrame being written.
    val relation = new HBaseRelation(sqlContext, params, data.schema)
    relation.insert(data, overwrite = true)
    relation
  }


}
