package cn.ipanel.bigdata.dw

import cn.ipanel.bigdata.boot.config.Configuration
import cn.ipanel.bigdata.comm.Parameter
import cn.ipanel.bigdata.utils.Util.strToI
import cn.ipanel.bigdata.boot.date.Day
import cn.ipanel.bigdata.boot.period.Period
import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.comm.Parameter.HBASE_DB_NAMESPACE
import cn.ipanel.bigdata.dw
import cn.ipanel.bigdata.dw.realtime.DeviceOnlineStatus
import cn.ipanel.bigdata.utils.{Dictionary, Util}
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions.{col, lit}

/**
 * Author: lzz
 * Date: 2022/3/30 15:21
 */
package object dws {

  // DDL used to create the Phoenix schema (namespace) if it does not exist yet.
  // NOTE: this references HBASE_NAMESPACE, which is declared on the next line;
  // the forward reference is safe only because both vals are lazy — keep them
  // lazy if this block is ever reordered.
  final lazy val BUILD: String = "create schema if not exists " + HBASE_NAMESPACE
  // Phoenix namespace (schema) name, resolved from configuration at first use.
  final lazy val HBASE_NAMESPACE: String = Configuration.getParam(HBASE_DB_NAMESPACE)

  // Singleton handles to the concrete DWS table objects.
  val T_DEVICE_TAG            : DeviceTag.type            = DeviceTag
  val T_DEVICE_INCR_TAG       : DeviceIncrTag.type        = DeviceIncrTag
  val T_HOME_CT               : HomeCt.type               = HomeCt

  /**
   * Encodes a calendar day as the compact integer period key stored in the
   * Phoenix F_PERIOD_TIME column. The layout depends on the aggregation period
   * (examples for 2023-01-01):
   *
   *  - YEAR    -> 23      (two-digit year)
   *  - QUARTER -> 231     (yy * 10 + quarter)
   *  - MONTH   -> 2301    (yy * 100 + month)
   *  - WEEK    -> 23001   (yy * 1000 + week-of-year)
   *  - default (day-level) -> 230101
   *
   * @param _DAY   the day to encode
   * @param period aggregation period selecting the encoding
   * @return the integer period key
   */
  def parseDateToPhoenixDate(_DAY: Day, period: Period): Int = {
    // Local def, not val: the day-level default branch never needs toYear.
    def yy: Int = _DAY.toYear % 100
    period match {
      case Period.YEAR    => yy
      case Period.QUARTER => yy * 10 + _DAY.toQuarter
      case Period.MONTH   => yy * 100 + _DAY.toMonth
      case Period.WEEK    => yy * 1000 + _DAY.asWeek.getWeekInYear
      case _              => _DAY.toSimpleDate
    }
  }

  /** Column-name constants shared by all DWS tables (extends the base set). */
  trait DwsColumns extends BaseColumns {
    final val F_COUNT         : String = "f_count"                  // number of occurrences

    // Customized metric columns
    final val F_DURATION                    : String = "f_duration"                      // duration, in seconds
    final val F_CPU_RATE                    : String = "f_cpu_rate"                      // CPU usage rate
    final val F_MEMORY_RATE                 : String = "f_memory_rate"                   // memory usage rate
    final val F_CPU_EXCEED_COUNT            : String = "f_cpu_exceed_count"              // times CPU exceeded the threshold
    final val F_CPU_EXCEED_DEVICE_COUNT     : String = "f_cpu_exceed_device_count"       // devices whose CPU exceeded the threshold (original comment duplicated the count one — inferred from name, verify)
    final val F_MEMORY_EXCEED_COUNT         : String = "f_memory_exceed_count"           // times memory exceeded the threshold
    final val F_MEMORY_EXCEED_DEVICE_COUNT  : String = "f_memory_exceed_device_count"    // devices whose memory exceeded the threshold (original comment duplicated the count one — inferred from name, verify)
  }

  /** DWS-layer constants on top of the base [[Const]] trait. */
  trait DwsConst extends Const {
    // Phoenix SALT_BUCKETS value, rendered as a string for DDL interpolation;
    // derived from SUM_REGION_SERVERS (declared in Const — semantics assumed
    // to be the region-server count, verify against Const).
    def saltBuckets: String = s"$SUM_REGION_SERVERS"
  }

  /**
   * Base class for DWS Phoenix tables. Concrete tables supply their extra
   * column names, the DDL fragment for those columns, and any extra
   * primary-key columns; this class provides lookup, deletion and DDL built
   * from the fixed column set plus those extensions.
   */
  abstract class AbsDwsPhoenix(tbName: String)
          extends Phoenix(HBASE_NAMESPACE, tbName)
            with DwsConst
            with DwsColumns {

    /** An empty, typed dataset of [[AbsDwsTable]] rows. */
    override def emptyTable: Dataset[AbsDwsTable] = {
      import IMPLICITS._
      val noRows = spark.sparkContext.emptyRDD[AbsDwsTable]
      spark.createDataset(noRows)
    }

    /**
     * Loads the rows whose period key matches the given day/period.
     *
     * @param day    day to look up
     * @param period aggregation period used to encode the key
     * @return rows with F_PERIOD_TIME equal to the encoded key
     */
    def find(day: Day, period: Period): DataFrame = {
      val periodKey = parseDateToPhoenixDate(day, period)
      load.filter(col(F_PERIOD_TIME) === lit(periodKey))
    }

    /**
     * Deletes every row for the given day/period from this table.
     *
     * @param day    day whose rows are removed
     * @param period aggregation period used to encode the key
     */
    def drop(day: Day, period: Period): Unit = {
      // periodKey is an Int computed locally, so interpolating it into the
      // statement cannot inject arbitrary SQL.
      val periodKey = parseDateToPhoenixDate(day, period)
      val deleteStmt = s"""
                      | delete from $getDBName.$getTBName
                      | where
                      | $F_PERIOD_TIME=$periodKey
                      |""".stripMargin
      _local_sql(deleteStmt)
    }


    /**
     * Column names specific to each concrete table, beyond the fixed set.
     * @return the extra column names
     */
    def extraColumnNames: Seq[String]

    /**
     * All Phoenix columns of this table: the fixed columns followed by the
     * subclass-specific extras.
     * @return the full ordered column list
     */
    override def getTBColumns: Seq[String] =
      Seq(F_PERIOD_TIME, F_REGION, F_COUNT, F_DEVICE_COUNT) ++ extraColumnNames

    /**
     * DDL fragment declaring the subclass-specific columns.
     * NOTE: must start with a comma — it is spliced right after the fixed
     * column declarations in [[buildTable]].
     * @return the column DDL fragment, empty by default
     */
    def extraColumnBuild: String = ""

    /**
     * Subclass-specific primary-key columns, appended to the fixed key.
     * @return the extra key columns, empty by default
     */
    def extraPrimaryKeys: String = ""

    /**
     * CREATE TABLE DDL: fixed columns, then the subclass column fragment,
     * then the composite primary key (fixed keys plus subclass extras).
     * @return the full DDL statement
     */
    override def buildTable: String =
      s"""
         | CREATE TABLE IF NOT EXISTS $getDBName.$getTBName (
         |  $F_PERIOD_TIME BIGINT NOT NULL
         |  ,$F_REGION VARCHAR NOT NULL
         |  ,$F_COUNT BIGINT
         |  ,$F_DEVICE_COUNT BIGINT
         |  $extraColumnBuild
         |  CONSTRAINT PK PRIMARY KEY
         |  ($F_PERIOD_TIME, $F_REGION $extraPrimaryKeys)
         | ) SALT_BUCKETS = $saltBuckets
         |""".stripMargin
  }

  /**
   * Base row type for DWS Phoenix tables. Holds the four fixed columns as
   * mutable fields; subclasses append their extra column values via
   * [[toParam]]. Implements [[Product]] over the combined value vector so a
   * row can be written through Spark/Phoenix machinery.
   *
   * @param f_period_time  encoded period key (see parseDateToPhoenixDate)
   * @param f_region       region code
   * @param f_count        occurrence count
   * @param f_device_count device count
   */
  abstract class AbsDwsTable(var f_period_time: Long,
                             var f_region: Long,
                             var f_count: Long,
                             var f_device_count: Long)
        extends DwsConst
          with DwsColumns
          with Product
          with Serializable {

    import Util._

    // FIX: was `lazy val`, which cached the field values captured at first
    // access — an `_init_(row)` call after that point mutated the fields but
    // left the cached array stale (broken for reused template instances).
    // Recomputed on demand so the Product view always reflects current state.
    def _PARAM: Array[Any] = {
      Array(f_period_time, f_region, f_count, f_device_count) ++ toParam
    }

    /**
     * Re-populates the fixed fields from a Spark [[Row]]; missing/null cells
     * fall back to LONG0. Subclasses should extend this for their own fields.
     *
     * @param row source row keyed by the F_* column names
     * @return this instance, for chaining
     */
    def _init_(row: Row): AbsDwsTable = {
      f_period_time   = rowTo[Long](row, F_PERIOD_TIME, LONG0)
      f_region        = rowTo[Long](row, F_REGION, LONG0)
      f_count         = rowTo[Long](row, F_COUNT, LONG0)
      f_device_count  = rowTo[Long](row, F_DEVICE_COUNT, LONG0)
      this
    }

    /** Subclass-specific column values, appended after the fixed four. */
    def toParam: Array[Any]

    override def productElement(n: Int): Any = _PARAM(n)
    override def productArity: Int = _PARAM.length
    // FIX: was `_PARAM.contains(that)`, which tested whether `that` equals one
    // of the element *values* — violating the canEqual contract (it must
    // report type compatibility for a symmetric equals).
    override def canEqual(that: Any): Boolean = that.isInstanceOf[AbsDwsTable]
  }
}