package cn.ipanel.bigdata.boot.source.genre

import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.boot.source._
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

import scala.util.control.NonFatal

/**
 * Abstract Redis data source (read-oriented by default).
 *
 * Author: lzz
 * Date: 2021/11/16 16:15
 *
 * @param dbName  logical alias name of the Redis database
 * @param dbIndex Redis database index number
 */
abstract class Redis(dbName: String, dbIndex: Byte) extends Mapper with Db {

  // SQLContext taken from the shared SparkSession (presumably provided by Mapper/Db — TODO confirm).
  // NOTE(review): currently unreferenced in this class; kept for subclass/legacy parity.
  private[this] lazy val SC: SQLContext = spark.sqlContext
  // Older Scala/Spark versions can obtain it this way instead:
  //  private[this] lazy val SC: SQLContext = SQLContext.getOrCreate(spark.sparkContext)

  override def getSourceGenre: DataSource.Genre = DataSource.GENRE_REDIS

  // Defaults to read-only: save() below refuses to write unless a concrete
  // subclass overrides this to return DataSource.MODEL_WRITE.
  override def getSourceModel: DataSource.Model = DataSource.MODEL_READ

  override def getDBName: String = dbName

  /** Redis database index this source is bound to. */
  def getDbIndex: Byte = dbIndex

  /**
   * SQL execution is not supported for a Redis source.
   *
   * NOTE(review): returns null to satisfy the common interface; callers must
   * null-check. Consider Option[DataFrame] in a future interface revision.
   */
  def exec(sql: String): DataFrame = {
    null
  }

  /**
   * Persist the given DataFrame to Redis via the spark-redis connector.
   * A null DataFrame is ignored; a read-only source refuses the write and logs why.
   * Failures are logged with a full stack trace and swallowed (best-effort write).
   *
   * @param df   data to persist; ignored when null
   * @param mode Spark SaveMode applied to the write
   */
  def save(df: DataFrame, mode: SaveMode): Unit = {
    if (df != null) {
      getSourceModel match {
        case DataSource.MODEL_WRITE =>
          Logger.I(s"Save Redis[$dbIndex]")
          try {
            df.write
              // connector format, e.g. "org.apache.spark.sql.redis"
              .format(protocol.source)
              .option("table", "foo") // FIXME: hard-coded table name; should come from config/caller
              .mode(mode)             // honor the caller-supplied SaveMode (was previously ignored)
              .save()
          } catch {
            // NonFatal: let OutOfMemoryError, InterruptedException, etc. propagate
            case NonFatal(e) =>
              Logger.E(
                s"""Save Redis[$dbIndex] Failed.
                   | Because: ${ExceptionUtils.getStackTrace(e)}
                   |""".stripMargin)
          }
        case _ =>
          Logger.I(s"Refuse Save Redis[$dbIndex]. Because: Only Read")
      }
    }
  }

  /**
   * Load keys matching the hard-coded pattern from Redis as a DataFrame.
   *
   * FIXME: "foo*" pattern and partition count (5) are hard-coded; should be
   * parameterized before production use.
   */
  def load(): DataFrame = {
    import com.redislabs.provider.redis._
    import spark.implicits._
    val keysRDD = spark.sparkContext.fromRedisKeyPattern("foo*", 5)
    keysRDD.toDF()
  }

}

// Companion holding the connection-parameter keys used when configuring a Redis source.
protected[source] object Redis {
  // NOTE(review): name looks truncated — almost certainly meant PARAM_URL. Renaming
  // would break existing callers; align the name in a coordinated refactor.
  val PARAM_       : String = "url"
  val PARAM_USER      : String = "user"
  val PARAM_PASSWORD  : String = "password"
  val PARAM_DBINDEX   : String = "dbindex"
}