package cn.ipanel.bigdata.dw.realtime

import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.dw.dws.HBASE_NAMESPACE
import cn.ipanel.bigdata.dw.{BaseColumns, DetailColumns, SUM_REGION_SERVERS}
import org.apache.spark.sql.{Dataset, Row}

/**
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_shanxi
 * @date 2023/08/04 15:00
 * @description: Per-device alarm counts grouped by device tag, persisted to the Phoenix detail table t_alarm_by_tag_detail
 */
 protected[realtime] object AlarmDetail extends Phoenix(HBASE_NAMESPACE, "t_alarm_by_tag_detail") with BaseColumns with DetailColumns {

  final val F_TAG        : String = "f_tag"         // device category (coarse-grained tag)
  final val F_ALARM_COUNT: String = "f_alarm_count" // number of alarms

  /** Full column list, in the same order as the primary key plus the measure column. */
  override def getTBColumns: Seq[String] = Seq(F_DATE, F_REGION, F_DEVICE_ID, F_TAG, F_ALARM_COUNT)

  /**
   * Phoenix DDL for the detail table.
   *
   * NOTE: in Phoenix the `CONSTRAINT ... PRIMARY KEY` clause is an item of the
   * column-definition list and must be separated from the last column by a
   * comma — the original statement omitted it, making the DDL unparsable.
   */
  override def buildTable: String = {
    s"""
       |CREATE TABLE IF NOT EXISTS $getDBName.$getTBName (
       |$F_DATE INTEGER NOT NULL
       |, $F_REGION BIGINT NOT NULL
       |, $F_DEVICE_ID VARCHAR NOT NULL
       |, $F_TAG VARCHAR NOT NULL
       |, $F_ALARM_COUNT INTEGER
       |, constraint pk primary key
       |($F_DATE, $F_REGION, $F_DEVICE_ID, $F_TAG)) salt_buckets = $SUM_REGION_SERVERS
       |""".stripMargin
  }

  /** An empty, correctly-typed Dataset, used as a neutral element / fallback. */
  override def emptyTable: Dataset[_] = {
    import spark.implicits._
    spark.createDataset(spark.sparkContext.emptyRDD[AlertTable])
  }

  /** Row type mirroring the table schema; field names match the Phoenix columns. */
  case class AlertTable(f_date: Int,
                        f_region: Long,
                        f_device_id: String,
                        f_tag: String,
                        f_alarm_count: Int)

  object AlertTable {

    /** Builds an [[AlertTable]] from a Spark [[Row]] carrying the table's columns. */
    def apply(row: Row): AlertTable = {
      AlertTable(row.getAs[Int](F_DATE)
        , row.getAs[Long](F_REGION)
        , row.getAs[String](F_DEVICE_ID)
        , row.getAs[String](F_TAG)
        , row.getAs[Int](F_ALARM_COUNT)
      )
    }
  }
}
