package cn.ipanel.bigdata.boot.source.genre

import cn.ipanel.bigdata.boot.date.Time
import cn.ipanel.bigdata.boot.logger.Logger
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.sql.{DataFrame, Row, SQLContext}

/**
 * Author: lzz
 * Date: 2021/11/16 16:15
 */
abstract class MultiMysql(val dbName: String, val tbName: String) extends Mysql(dbName, tbName) {

  import Mysql._

  private[this] final lazy val SC = spark.sqlContext

  /**
   * Loads and unions every MySQL shard table named `<tbName>_<n>` (n = 1 to 3
   * digits) inside database `dbName`.
   *
   * Shard tables are discovered by querying `information_schema.tables`, then
   * each match is loaded through [[exec]] and the per-shard DataFrames are
   * unioned into one result.
   *
   * @return the unioned DataFrame of all matching shards, or `null` when the
   *         discovery/load fails or no shard table matches (the error is logged
   *         and swallowed — callers must null-check; kept for backward
   *         compatibility with the original contract).
   */
  override def load: DataFrame = {
    Logger.I(s"Load Mysql[$getDBName.$getTBName]")
    try {
      SC.read
        .format(protocol.source)
        .options(Map(PARAM_URL -> database.url,
                     PARAM_DRIVER -> protocol.driver,
                     PARAM_USER -> database.username,
                     PARAM_PASSWORD -> database.password,
                     // FIX: read the `tables` view, not the schema itself —
                     // `table_schema` / `table_name` are columns of
                     // information_schema.tables, not of "information_schema".
                     PARAM_TABLE -> "information_schema.tables"))
        .load()
        .select("table_name")
        // FIX: the regex literal was missing its closing quote, which made the
        // filter expression unparsable (every load fell into the catch block).
        // NOTE(review): MySQL REGEXP is a substring match — consider anchoring
        // with `$` if names like `<tbName>_12_bak` must be excluded.
        .filter(s"table_schema='$getDBName' and table_name REGEXP '${getTBName}_[0-9]{1,3}'")
        .collect()
        .map(row => exec(row.getString(0)))
        // reduceOption avoids the UnsupportedOperationException that reduce
        // throws on an empty array (no shard matched); orNull preserves the
        // original null-on-failure return contract.
        .reduceOption(_ union _)
        .orNull
    } catch {
      case e: Exception =>
        Logger.E(
          s"""Load Mysql[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        null
    }
  }
}