package cn.ipanel.bigdata.dw.dim.phoenix

import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.dw.dws.HBASE_NAMESPACE
import cn.ipanel.bigdata.utils.Dictionary.F_DATE
import org.apache.spark.sql.{Dataset, Row}

/**
 * Phoenix dimension-table definition for device class tags.
 *
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_panyu   (Panyu big-data project — device classification)
 * @date 2023/11/14 16:15
 * @description: Declares the schema, DDL and row model for the t_class_tag table.
 */
protected[phoenix] object ClassTag extends Phoenix(HBASE_NAMESPACE, "t_class_tag") {

  // Column names of the t_class_tag table (the date column F_DATE is shared
  // project-wide and imported from Dictionary).
  final val F_CLASS   : String = "f_class"
  final val F_TAG     : String = "f_tag"
  final val F_TITLE   : String = "f_title"
  final val F_TAG_NAME: String = "f_tag_name"

  /** Ordered column list of the table: date first, then the tag columns. */
  override def getTBColumns: Seq[String] =
    F_DATE :: F_CLASS :: F_TAG :: F_TITLE :: F_TAG_NAME :: Nil

  /**
   * DDL that creates the table if it does not exist.
   * Primary key is (f_date, f_class); the table is pre-split into
   * `saltBuckets` salt buckets (value supplied by the Phoenix base class).
   */
  override def buildTable: String = {
    s"""
       |CREATE TABLE IF NOT EXISTS $getDBName.$getTBName(
       |$F_DATE INTEGER NOT NULL
       |, $F_CLASS VARCHAR NOT NULL
       |, $F_TAG VARCHAR
       |, $F_TITLE VARCHAR
       |, $F_TAG_NAME VARCHAR
       |CONSTRAINT PK PRIMARY KEY($F_DATE,$F_CLASS)
       |) SALT_BUCKETS = $saltBuckets;
       |""".stripMargin
  }

  /** An empty, correctly-typed Dataset for this table's row model. */
  override def emptyTable: Dataset[_] = {
    import IMPLICITS._
    val emptyRows = spark.sparkContext.emptyRDD[ClassTagTable]
    spark.createDataset(emptyRows)
  }

  /**
   * Row model for t_class_tag. Fields are deliberately kept as `var` to
   * preserve the original (mutable) external interface.
   */
  case class ClassTagTable(var f_date: Int,
                           var f_class: String,
                           var f_tag: String,
                           var f_title: String,
                           var f_tag_name: String)

  object ClassTagTable {
    /** Builds a ClassTagTable from a Spark Row, looking columns up by name. */
    def apply(row: Row): ClassTagTable = ClassTagTable(
      f_date     = row.getAs[Int](F_DATE),
      f_class    = row.getAs[String](F_CLASS),
      f_tag      = row.getAs[String](F_TAG),
      f_title    = row.getAs[String](F_TITLE),
      f_tag_name = row.getAs[String](F_TAG_NAME)
    )
  }
}