package cn.ipanel.bigdata.dw.dim.phoenix

import cn.ipanel.bigdata.boot.config.{ConfigKey, Configuration}
import cn.ipanel.bigdata.boot.date.Day
import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.dw.dws.HBASE_NAMESPACE
import cn.ipanel.bigdata.utils.Dictionary.F_DATE
import org.apache.spark.sql.{DataFrame, Dataset}

/**
 * Phoenix-backed table `t_area_update_time` in the `HBASE_NAMESPACE` schema.
 *
 * Keyed by `f_date`; stores one row per day recording an area count and the
 * configured project target. (Exact semantics of "area size" are not visible
 * here — presumably the number of areas updated that day; confirm with callers.)
 */
protected[phoenix] object AreaUpdateTime extends Phoenix(HBASE_NAMESPACE, "t_area_update_time") {

  /** Column: area count for the day (see class note — semantics assumed). */
  final val F_AREA_SIZE = "f_area_size"
  /** Column: project target, sourced from `ConfigKey.PROJECT_TARGET`. */
  final val F_TARGET    = "f_target"

  /** Columns of this table, in schema order. */
  override def getTBColumns: Seq[String] = Seq(F_DATE, F_AREA_SIZE, F_TARGET)

  /**
   * DDL to create the table if it does not exist.
   *
   * Fix: the original string omitted the comma between the last column
   * definition (`f_target varchar`) and the `constraint` clause, producing a
   * Phoenix grammar error — column definitions and table constraints must be
   * comma-separated inside the parenthesized list.
   */
  override def buildTable: String = {
    s"""
       |create table if not exists $getDBName.$getTBName (
       |$F_DATE integer not null
       |, $F_AREA_SIZE integer
       |, $F_TARGET varchar
       |, constraint pk primary key
       |($F_DATE)) salt_buckets = 3;
       |""".stripMargin
  }

  /**
   * Builds a single-row DataFrame for `day` with the given area count and the
   * project target read from configuration.
   *
   * @param day  day whose integer form (`day.toDate`) populates `f_date`
   * @param size value for `f_area_size`
   * @return one-row DataFrame with columns (f_date, f_area_size, f_target)
   */
  def createSingleRowDataset(day: Day, size: Long): DataFrame = {
    val data = Array((day.toDate, size, Configuration.getParam(ConfigKey.PROJECT_TARGET)))
    spark.createDataFrame(data).toDF(F_DATE, F_AREA_SIZE, F_TARGET)
  }

  /** An empty, correctly-typed Dataset matching this table's row shape. */
  override def emptyTable: Dataset[_] = {
    import spark.implicits._
    spark.createDataset(spark.sparkContext.emptyRDD[AreaUpdateTimeTable])
  }

  /** Row type mirroring the table schema (field names match column names). */
  case class AreaUpdateTimeTable(f_date: Int, f_area_size: Long, f_target: String)
}
