package cn.itcast.tags.spark.sql


import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, DataSourceRegister, RelationProvider}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

/**
 * Spark SQL data source provider for reading from / writing to HBase.
 *
 * Registered under the short name `"hbase"`, so callers can use
 * `spark.read.format("hbase")` / `df.write.format("hbase")`.
 */
class DefaultSource extends RelationProvider with CreatableRelationProvider with DataSourceRegister with Serializable  {

  // Option key whose value lists the columns to read, e.g. "id,name,age".
  val HBASE_TABLE_SELECT_FIELDS: String = "selectFields"
  // Separator used to split the selectFields option value.
  val SPERATOR: String = ","

  /**
   * Creates a relation for READING from HBase.
   *
   * Builds a schema from the comma-separated `selectFields` option; every
   * selected column is exposed as a nullable StringType field.
   *
   * @param sqlContext active SQLContext
   * @param parameters data source options; must contain `selectFields`
   * @return an [[HBaseRelation]] over the requested columns
   */
  override def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = {
    val schema: StructType = StructType(
      parameters(HBASE_TABLE_SELECT_FIELDS)
        .split(SPERATOR)
        .map(field => StructField(field, StringType, nullable = true))
    )
    new HBaseRelation(sqlContext, parameters, schema)
  }

  /**
   * Creates a relation for WRITING to HBase and immediately inserts the data.
   *
   * The schema is taken from the DataFrame being written. The save mode is
   * honored by translating `SaveMode.Overwrite` into the relation's
   * overwrite flag; all other modes perform a non-overwriting insert.
   *
   * @param sqlContext active SQLContext
   * @param mode       save mode requested by the caller
   * @param parameters data source options forwarded to the relation
   * @param data       rows to write
   * @return the [[HBaseRelation]] the data was written through
   */
  override def createRelation(sqlContext: SQLContext, mode: SaveMode, parameters: Map[String, String], data: DataFrame): BaseRelation = {
    val relation = new HBaseRelation(sqlContext, parameters, data.schema)

    // Previously the SaveMode argument was ignored and `false` was always
    // passed, making Overwrite behave like Append. Map it explicitly here.
    val overwrite: Boolean = mode match {
      case SaveMode.Overwrite => true
      case _                  => false
    }

    relation.insert(data, overwrite)
    relation
  }

  /** Short alias for `format("hbase")`. */
  override def shortName(): String = "hbase"
}
