package com.edata.bigdata.nebulasource

import com.edata.bigdata.util.EDataUtils
import org.apache.spark.sql.sources.{BaseRelation, CreatableRelationProvider, DataSourceRegister, RelationProvider}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

/**
 * Spark SQL data source provider for Nebula, registered under the short name
 * "nebula" (usable as `.format("nebula")`).
 *
 * Read path: builds a schema from the required "ret_cols" option (comma-separated
 * column names, all nullable StringType).
 * Write path: builds a schema from the required "schema" option (comma-separated
 * "name:type" pairs resolved via [[EDataUtils.convertDataType]]) and inserts the
 * DataFrame, honoring the caller's [[SaveMode]].
 */
class NebulaDataSourceProvider extends RelationProvider with CreatableRelationProvider with DataSourceRegister {

  /**
   * Creates a relation for reading.
   *
   * @param sqlContext the active SQLContext
   * @param parameters data source options; must contain "ret_cols"
   * @return a [[NebulaDataSource]] relation whose schema has one nullable
   *         StringType field per name listed in "ret_cols"
   * @throws IllegalArgumentException if "ret_cols" is missing
   */
  override def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): BaseRelation = {
    require(parameters.contains("ret_cols"), "nebula data source requires the 'ret_cols' option (comma-separated column names)")
    val schema: StructType = StructType(parameters("ret_cols").split(",").map(col => StructField(col, StringType, nullable = true)))
    new NebulaDataSource(sqlContext, parameters, schema)
  }

  /**
   * Creates a relation for writing and inserts `data` into it.
   *
   * Fix: the previous implementation hard-coded `insert(data, true)`, so every
   * mode — including SaveMode.Append — overwrote existing data. The overwrite
   * flag is now derived from the requested mode, per the
   * CreatableRelationProvider contract.
   *
   * @param sqlContext the active SQLContext
   * @param mode       save mode requested by the caller; only SaveMode.Overwrite
   *                   replaces existing data
   * @param parameters data source options; must contain "schema"
   * @param data       the DataFrame to write
   * @return the [[NebulaDataSource]] relation the data was written to
   * @throws IllegalArgumentException if "schema" is missing or an entry is not
   *                                  of the form "name:type"
   */
  override def createRelation(sqlContext: SQLContext, mode: SaveMode, parameters: Map[String, String], data: DataFrame): BaseRelation = {
    require(parameters.contains("schema"), "nebula data source requires the 'schema' option (comma-separated 'name:type' pairs)")
    val schema: StructType = StructType(parameters("schema").split(",").map { colType =>
      val parts = colType.split(":")
      require(parts.length == 2, s"malformed schema entry '$colType'; expected 'name:type'")
      StructField(parts(0), EDataUtils.convertDataType(parts(1)), nullable = true)
    })
    val datasource = new NebulaDataSource(sqlContext, parameters, schema)
    // Only Overwrite truncates; Append/ErrorIfExists/Ignore insert without replacing.
    datasource.insert(data, overwrite = mode == SaveMode.Overwrite)
    datasource
  }

  /** Short name for `.format("nebula")` registration via DataSourceRegister. */
  override def shortName(): String = "nebula"
}
