package cn.ipanel.bigdata.boot.source.genre

import cn.ipanel.bigdata.boot.date.Day
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.boot.source.{DataSource, Mapper, SparkExec, Table}
import cn.ipanel.bigdata.utils.Dictionary
import org.apache.commons.lang3.exception.ExceptionUtils
import org.apache.spark.sql.functions.{col, lit}
import org.apache.spark.sql.{DataFrame, Dataset}

import scala.util.control.NonFatal

/**
 * Base mapper for reading a single Cassandra table into a Spark [[DataFrame]].
 *
 * Read failures are logged and degraded to an empty DataFrame rather than
 * propagated, so downstream jobs keep running on partial data.
 *
 * @param dbName Cassandra keyspace name
 * @param tbName Cassandra table name
 */
abstract class Cassandra(dbName: String, tbName: String) extends Mapper with Table {

  override def getSourceGenre: DataSource.Genre = DataSource.GENRE_CASSANDRA
  override def getSourceModel: DataSource.Model = DataSource.MODEL_WRITE
  final val F_DATE       : String = Dictionary.F_DATE         // partition column name
  override def getDBName: String = dbName
  override def getTBName: String = tbName
  override def getTBColumns: Seq[String] = Seq()
  override def buildTable: String = ""

  /** An empty typed [[Dataset]], used as the safe fallback when any read fails. */
  override def emptyTable: Dataset[Table] = {
    import spark.implicits._
    spark.createDataset(spark.sparkContext.emptyRDD[Table])
  }

  /**
   * Loads the whole Cassandra table `dbName.tbName`.
   *
   * @return the table as a [[DataFrame]], or an empty DataFrame if the
   *         connector fails to load (the error is logged, not rethrown)
   */
  def load: DataFrame = {
    Logger.I(s"Load cassandra[$getDBName.$getTBName]")
    try {
      spark.read
        .format(protocol.source)
        .option("keyspace", getDBName)
        .option("table", getTBName)
        .load()
    } catch {
      // NonFatal only: let OutOfMemoryError / InterruptedException etc. propagate
      // instead of being silently converted into an empty DataFrame.
      case NonFatal(e) =>
        Logger.E(
          s"""Load cassandra[$getDBName.$getTBName] Failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        emptyTable.toDF()
    }
  }

  /**
   * Restricts the table to the rows whose partition column equals the given day.
   *
   * @param day the day to select (compared against [[F_DATE]])
   */
  def find(day: Day): DataFrame = {
    load.filter(col(F_DATE) === lit(day.toDate))
  }

  /**
   * Reads one day of data, degrading to an empty DataFrame on failure.
   *
   * NOTE(review): Spark is lazy, so errors raised only at action time may
   * escape this try/catch — confirm whether callers rely on that.
   *
   * @param day the day to read
   */
  def readToDataFrame(day: Day): DataFrame = {
    try {
      find(day)
    } catch {
      // NonFatal only — see load.
      case NonFatal(e) =>
        Logger.E(
          s""" read day[${day.toDate}] $dbName.$tbName failed.
             | Because: ${ExceptionUtils.getStackTrace(e)}
             |""".stripMargin)
        emptyTable.toDF()
    }
  }

  // Empty row type backing emptyTable. NOTE(review): this intentionally (?)
  // shadows the imported `Table` trait inside the class body, so
  // `Dataset[Table]` above refers to THIS case class — confirm this is meant.
  case class Table()
}
