package cn.ipanel.bigdata.dw.dws

import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.dw.BaseColumns
import cn.ipanel.bigdata.dw.dim.phoenix.saltBuckets
import org.apache.spark.sql.{Dataset, Row}

/**
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_panyu   (Panyu big data — regional data)
 * @date 2023/11/14 16:15
 * @description:
 */
/**
 * Phoenix-backed definition of the home-count summary table
 * (`t_home` in `HBASE_NAMESPACE`): one row per (period, region) pair
 * holding the number of homes observed for that period and region.
 */
protected[dws] object HomeCt extends Phoenix(HBASE_NAMESPACE, "t_home") with BaseColumns {

  // Column name: number of homes for a (period, region) pair.
  final val F_HOME_NUM                 : String = "f_home_num"

  /** Ordered column list used when reading/writing this table. */
  override def getTBColumns: Seq[String] = Seq(
    F_PERIOD_TIME, F_REGION, F_HOME_NUM
  )

  /**
   * DDL creating the Phoenix table if absent.
   * Primary key is (period time, region); SALT_BUCKETS pre-splits rows
   * across region servers to avoid write hot-spotting.
   *
   * NOTE(review): the statement ends with ';' — confirm the executor
   * tolerates/strips a trailing terminator before handing it to Phoenix.
   */
  override def buildTable: String = {
    s"""
       |CREATE TABLE IF NOT EXISTS $getDBName.$getTBName(
       |$F_PERIOD_TIME INTEGER NOT NULL
       |, $F_REGION BIGINT NOT NULL
       |, $F_HOME_NUM INTEGER NOT NULL
       |CONSTRAINT PK PRIMARY KEY($F_PERIOD_TIME, $F_REGION)
       |) SALT_BUCKETS = $saltBuckets;
       |""".stripMargin
  }

  /** An empty, correctly-typed Dataset matching this table's row schema. */
  override def emptyTable: Dataset[_] = {
    import IMPLICITS._
    spark.createDataset(spark.sparkContext.emptyRDD[HomeTable])
  }

  /**
   * Row model for t_home.
   *
   * NOTE(review): fields are `var` — kept as-is for backward compatibility
   * with any callers that mutate them, though immutable `val` fields would
   * be preferred for a case class.
   *
   * @param f_period_time period identifier (primary-key component)
   * @param f_region      region identifier (primary-key component)
   * @param f_home_num    home count for this (period, region)
   */
  case class HomeTable(var f_period_time: Int
                       , var f_region: Long
                       , var f_home_num: Int
                       )

  object HomeTable {
    /** Builds a [[HomeTable]] from a Spark [[Row]] keyed by the column constants. */
    def apply(row: Row): HomeTable = {
      HomeTable(row.getAs[Int](F_PERIOD_TIME)
        , row.getAs[Long](F_REGION)
        // Was getAs[Integer] (boxed java.lang.Integer), which relied on an
        // implicit unboxing conversion at the call site; use Int directly
        // for consistency with the sibling reads and the field type.
        // The column is declared NOT NULL, so a value is always present.
        , row.getAs[Int](F_HOME_NUM)
      )
    }
  }
}