package cn.ipanel.bigdata.dw.dim.phoenix

import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.dw.dws.HBASE_NAMESPACE
import cn.ipanel.bigdata.utils.Dictionary.F_STATUS
import org.apache.spark.sql.{Dataset, Row}

/**
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_panyu   (Panyu big data — regional data)
 * @date 2023/11/14 16:15
 * @description:
 */
protected[phoenix] object IOTHome extends Phoenix(HBASE_NAMESPACE, "t_iot_home") {

  // Phoenix column names of t_iot_home.
  final val F_HOME_ID                     : String = "f_home_id"
  final val F_REGION                      : String = "f_region"
  final val F_CREATE_TIME                 : String = "f_create_time"      // creation time
  final val F_UPDATE_TIME                 : String = "f_update_time"      // status-change time

  /** Columns of the table, in the same order as the [[HomeTable]] fields. */
  override def getTBColumns: Seq[String] = Seq(
    F_REGION, F_STATUS, F_HOME_ID, F_CREATE_TIME, F_UPDATE_TIME
  )

  /**
   * Phoenix DDL that creates the table if it does not exist.
   *
   * Fix: the original DDL was missing the comma separating the last column
   * definition from the `CONSTRAINT` clause, which makes the statement
   * invalid under Phoenix's CREATE TABLE grammar and fails at execution.
   */
  override def buildTable: String = {
    s"""
       |CREATE TABLE IF NOT EXISTS $getDBName.$getTBName(
       |$F_HOME_ID VARCHAR NOT NULL
       |, $F_REGION BIGINT
       |, $F_STATUS TINYINT
       |, $F_CREATE_TIME VARCHAR
       |, $F_UPDATE_TIME VARCHAR
       |, CONSTRAINT PK PRIMARY KEY($F_HOME_ID)
       |) SALT_BUCKETS = $saltBuckets;
       |""".stripMargin
  }

  /** An empty Dataset with the [[HomeTable]] schema (no rows). */
  override def emptyTable: Dataset[_] = {
    import IMPLICITS._
    spark.createDataset(spark.sparkContext.emptyRDD[HomeTable])
  }

  /**
   * Row model for t_iot_home; field order mirrors [[getTBColumns]].
   *
   * @param f_region      region id (BIGINT column)
   * @param f_status      status flag (TINYINT column)
   * @param f_home_id     primary key — home id
   * @param f_create_time creation time, stored as VARCHAR
   * @param f_update_time status-change time, stored as VARCHAR
   */
  case class HomeTable(var f_region: Long
                       , var f_status: Byte
                       , var f_home_id: String
                       , var f_create_time: String
                       , var f_update_time: String
                       )

  object HomeTable {
    /**
     * Builds a [[HomeTable]] from a Spark [[Row]] by column name.
     *
     * NOTE(review): `getAs[Long]`/`getAs[Byte]` throw NPE if the column is
     * null (primitive unboxing) — confirm f_region/f_status are non-null
     * upstream before relying on this.
     */
    def apply(row: Row): HomeTable = {
      HomeTable(row.getAs[Long](F_REGION)
        , row.getAs[Byte](F_STATUS)
        , row.getAs[String](F_HOME_ID)
        , row.getAs[String](F_CREATE_TIME)
        , row.getAs[String](F_UPDATE_TIME)
      )
    }
  }
}