package cn.ipanel.bigdata.boot.source

import cn.ipanel.bigdata.boot.source.genre.{HBase, Hdfs, Hive, MultiMysql, Mysql, Phoenix}
import org.apache.spark.sql.Dataset

/**
 * Author: lzz
 * Date: 2021/11/19 9:35
 */
object DataSource {

  /**
   * Identifies a data-source backend by its lowercase name (e.g. "mysql").
   *
   * NOTE(review): a sealed ADT / Scala 3 enum would give compile-time
   * exhaustiveness in `build`; the stringly-keyed case class is kept for
   * caller compatibility.
   */
  case class Genre(name: String)

  lazy val GENRE_MYSQL: Genre = Genre("mysql")
  lazy val GENRE_REDIS: Genre = Genre("redis")
  lazy val GENRE_PHOENIX: Genre = Genre("phoenix")
  lazy val GENRE_ORACLE: Genre = Genre("oracle")
  lazy val GENRE_SQLSERVER: Genre = Genre("sqlserver")
  lazy val GENRE_HIVE: Genre = Genre("hive")
  lazy val GENRE_HBASE: Genre = Genre("hbase")
  lazy val GENRE_HDFS: Genre = Genre("hdfs")
  lazy val GENRE_VIEW: Genre = Genre("view")
  lazy val GENRE_CASSANDRA: Genre = Genre("cassandra")

  /** Access mode of a mapper; every builder below hard-codes [[MODEL_READ]]. */
  case class Model(name: String)
  lazy val MODEL_READ: Model = Model("read")
  lazy val MODEL_WRITE: Model = Model("write")

  /**
   * Builds a read-mode mapper for the requested backend.
   *
   * Only mysql, phoenix, hive, hbase and hdfs are handled here; the other
   * declared genres (redis, oracle, sqlserver, view, cassandra) fall through.
   *
   * @param genre   backend kind, matched by equality against the `GENRE_*` constants
   * @param dbName  database name; for HDFS it is the sole constructor argument
   * @param tbName  table name; ignored for HDFS
   * @param isMulti when true and `genre` is mysql, a multi-instance mysql mapper is built
   * @return the mapper, or `null` for unsupported genres
   */
  def build(genre: Genre, dbName: String, tbName: String, isMulti: Boolean = false): Mapper = {
    genre match {
      case GENRE_MYSQL    => if (isMulti) buildMultiMysql(dbName, tbName) else buildMysql(dbName, tbName)
      case GENRE_PHOENIX  => buildPhoenix(dbName, tbName)
      case GENRE_HIVE     => buildHive(dbName, tbName)
      case GENRE_HBASE    => buildHBase(dbName, tbName)
      case GENRE_HDFS     => buildHdfs(dbName)
      // NOTE(review): returning null is a Scala anti-pattern, but callers may
      // rely on null-checking unsupported genres; switching to Option[Mapper]
      // or an IllegalArgumentException would be a breaking change — TODO confirm.
      case _ => null
    }
  }

  /** Read-mode [[Mysql]] mapper with no column projection and an empty table builder. */
  def buildMysql(dbName: String, tbName: String): Mysql = new Mysql(dbName, tbName) {
    override def getSourceModel: DataSource.Model = DataSource.MODEL_READ
    override def getTBColumns: Seq[String] = Seq()
    override def buildTable: String = ""
    // Empty Dataset[Table]; IMPLICITS is assumed to supply the Table encoder — TODO confirm.
    override def emptyTable: Dataset[Table] = {
      import IMPLICITS._
      spark.createDataset(spark.sparkContext.emptyRDD[Table])
    }
  }

  /**
   * Read-mode [[MultiMysql]] mapper with no column projection and an empty table builder.
   *
   * Fix: the return type was previously declared as the wider `Mysql` even
   * though a `MultiMysql` is constructed; narrowed for consistency with the
   * other builders (covariant, so existing callers are unaffected).
   */
  def buildMultiMysql(dbName: String, tbName: String): MultiMysql = new MultiMysql(dbName, tbName) {
    override def getSourceModel: DataSource.Model = DataSource.MODEL_READ
    override def getTBColumns: Seq[String] = Seq()
    override def buildTable: String = ""
    override def emptyTable: Dataset[Table] = {
      import IMPLICITS._
      spark.createDataset(spark.sparkContext.emptyRDD[Table])
    }
  }

  /** Read-mode [[Phoenix]] mapper with no column projection and an empty table builder. */
  def buildPhoenix(dbName: String, tbName: String): Phoenix = new Phoenix(dbName, tbName) {
    override def getSourceModel: DataSource.Model = DataSource.MODEL_READ
    override def getTBColumns: Seq[String] = Seq()
    override def buildTable: String = ""
    override def emptyTable: Dataset[Table] = {
      import IMPLICITS._
      spark.createDataset(spark.sparkContext.emptyRDD[Table])
    }
  }

  /** Read-mode [[Hive]] mapper with no column projection and an empty table builder. */
  def buildHive(dbName: String, tbName: String): Hive = new Hive(dbName, tbName) {
    override def getSourceModel: DataSource.Model = DataSource.MODEL_READ
    override def getTBColumns: Seq[String] = Seq()
    override def buildTable: String = ""
    override def emptyTable: Dataset[Table] = {
      import IMPLICITS._
      spark.createDataset(spark.sparkContext.emptyRDD[Table])
    }
  }

  /** Read-mode [[HBase]] mapper with no column projection and an empty table builder. */
  def buildHBase(dbName: String, tbName: String): HBase = new HBase(dbName, tbName) {
    override def getSourceModel: DataSource.Model = DataSource.MODEL_READ
    override def getTBColumns: Seq[String] = Seq()
    override def buildTable: String = ""
    override def emptyTable: Dataset[Table] = {
      import IMPLICITS._
      spark.createDataset(spark.sparkContext.emptyRDD[Table])
    }
  }

  /** Read-mode [[Hdfs]] mapper; the single `genre` argument doubles as the path/db name — TODO confirm. */
  def buildHdfs(genre: String): Hdfs = new Hdfs(genre) {
    override def getSourceModel: DataSource.Model = DataSource.MODEL_READ
  }

  /** Placeholder row type used by the `emptyTable` datasets above; carries no fields. */
  case class Table()
}