package cn.itcast.model.utils

import cn.itcast.model.bean.HBaseMeta
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, RelationProvider}

/**
 * 自定义数据源
 * 写入HBase/读取HBase
 */
/**
 * Custom Spark SQL data source for HBase.
 *
 * Supports writing to HBase (via [[CreatableRelationProvider]]) and
 * reading from HBase (via [[RelationProvider]]).
 */
class HBaseSource extends CreatableRelationProvider with RelationProvider with Serializable {

  /**
   * Creates a writable relation and immediately persists `data` into HBase.
   *
   * @param sqlContext SparkSQL context object
   * @param mode       save mode; `SaveMode.Overwrite` triggers an overwriting
   *                   insert, every other mode appends
   * @param parameters caller-supplied options, parsed into an [[HBaseMeta]]
   * @param data       the DataFrame to persist in this call
   * @return the relation that performed the write
   */
  override def createRelation(sqlContext: SQLContext,
                              mode: SaveMode,
                              parameters: Map[String, String],
                              data: DataFrame): BaseRelation = {
    val relation = new HBaseWritableRelation(sqlContext, HBaseMeta(parameters), data)
    // insert must be invoked manually here — Spark will not call it for us.
    // FIX: `mode` was previously ignored and `overwrite` hard-coded to false,
    // so SaveMode.Overwrite silently behaved like Append.
    // NOTE(review): assumes HBaseWritableRelation.insert follows the standard
    // InsertableRelation contract (second argument = overwrite) — confirm.
    relation.insert(data, mode == SaveMode.Overwrite)
    relation
  }

  /**
   * Creates a readable relation for scanning data out of HBase.
   *
   * @param sqlContext SparkSQL context object
   * @param parameters caller-supplied options, parsed into an [[HBaseMeta]]
   * @return a relation that reads the configured HBase table
   */
  override def createRelation(sqlContext: SQLContext,
                              parameters: Map[String, String]): BaseRelation = {
    new HBaseReadableRelation(sqlContext, HBaseMeta(parameters))
  }
}
