package cn.ipanel.bigdata.dw.dim.phoenix

import cn.ipanel.bigdata.boot.date.Day
import cn.ipanel.bigdata.boot.period.Period
import cn.ipanel.bigdata.boot.source.genre.Phoenix
import cn.ipanel.bigdata.dw.dws.HBASE_NAMESPACE
import cn.ipanel.bigdata.utils.Dictionary.{BusState, F_STATUS}
import org.apache.spark.sql.functions.{col, lit}
import org.apache.spark.sql.{DataFrame, Dataset, Row}

/**
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_panyu   番禺大数据，区域数据 (Panyu big-data platform — regional data)
 * @date 2023/11/14 16:15
 * @description: Phoenix dimension-table definition for IoT devices (t_iot_device),
 *               including DDL, row model, and period-valid device loading.
 */
/**
 * Phoenix-backed dimension table of IoT devices (`t_iot_device`).
 *
 * Holds one row per device (region, business status, device class, create/update
 * timestamps) and exposes a helper to load the devices that were valid during a
 * given statistics period.
 */
protected[phoenix] object IOTDevice extends Phoenix(HBASE_NAMESPACE, "t_iot_device") with Area {

  final val F_DEVICE_ID                   : String = "f_device_id"
  final val F_DEVICE_CLASS                : String = "f_device_class"
  final val F_REGION                      : String = "f_region"
  final val F_CREATE_TIME                 : String = "f_create_time"
  // Status-change time; relevant to both deletions and updates.
  final val F_UPDATE_TIME                 : String = "f_update_time"

  /** Columns persisted for this table. */
  override def getTBColumns: Seq[String] = Seq(
    F_DEVICE_ID, F_REGION, F_STATUS, F_DEVICE_CLASS, F_CREATE_TIME, F_UPDATE_TIME
  )

  /**
   * Phoenix DDL creating the table if absent, salted across `saltBuckets` buckets.
   *
   * NOTE(review): Phoenix grammar allows the CONSTRAINT clause to follow the last
   * column without a comma, so the DDL below is well-formed. The trailing ';' may
   * need stripping if this string is ever fed to a raw JDBC Statement — confirm
   * against how the base class executes it.
   */
  override def buildTable: String = {
    s"""
       |CREATE TABLE IF NOT EXISTS $getDBName.$getTBName(
       |$F_DEVICE_ID VARCHAR NOT NULL
       |, $F_REGION BIGINT
       |, $F_STATUS TINYINT
       |, $F_DEVICE_CLASS VARCHAR
       |, $F_CREATE_TIME VARCHAR
       |, $F_UPDATE_TIME VARCHAR
       |CONSTRAINT PK PRIMARY KEY($F_DEVICE_ID)
       |) SALT_BUCKETS = $saltBuckets;
       |""".stripMargin
  }

  /**
   * Loads devices that were valid at any point during the period containing `day`.
   *
   * A device counts as valid when it was created before the period ended, and is
   * either not deleted, or was deleted only after the period ended.
   *
   * @param day    reference day inside the statistics period
   * @param period period granularity (day / week / month / year); defaults to day
   * @return filtered device DataFrame, or `null` for an unrecognised period
   *         (historical behaviour preserved — callers should guard against it)
   */
  def loadPeriodValidDevice(day: Day, period: Period = Period.DAY): DataFrame = {
    // Exclusive upper bound of the period: start of the next day/week/month/year.
    // Computed once here instead of repeating the full filter per case.
    val periodEnd = period match {
      case Period.DAY   => Some(day.next().toTime)
      case Period.WEEK  => Some(day.asWeek.next().toTime)
      case Period.MONTH => Some(day.asMonth.next().toTime)
      case Period.YEAR  => Some(day.asYear.next().toTime)
      case _            => None
    }
    periodEnd match {
      case Some(end) =>
        load
          // Created before the period ended, and either still active or
          // deleted only after the period ended.
          .filter(col(F_CREATE_TIME) < lit(end) and
            (col(F_STATUS) =!= lit(BusState.DEL) or col(F_UPDATE_TIME) >= lit(end)))
      case None =>
        // NOTE(review): returning null mirrors the original `case _ => null`;
        // consider throwing or returning emptyTable.toDF instead.
        null
    }
  }

  /** An empty, correctly-typed Dataset for this table. */
  override def emptyTable: Dataset[_] = {
    import IMPLICITS._
    spark.createDataset(spark.sparkContext.emptyRDD[DeviceTable])
  }

  /**
   * Row model for `t_iot_device`.
   *
   * Fields are `var` for historical/bean-style compatibility with existing
   * callers; avoid mutating instances after construction.
   */
  case class DeviceTable(var f_region: Long
                        , var f_status: Byte
                        , var f_device_id: String
                        , var f_device_class: String
                        , var f_create_time: String
                        , var f_update_time: String
                       )

  object DeviceTable {
    /**
     * Builds a [[DeviceTable]] from a Spark [[Row]] by column name.
     *
     * NOTE(review): `getAs[Long]` / `getAs[Byte]` unbox and will throw an NPE
     * if the underlying column is NULL — both `f_region` and `f_status` are
     * nullable in the DDL. Confirm upstream guarantees non-null values.
     */
    def apply(row: Row): DeviceTable = {
      DeviceTable(row.getAs[Long](F_REGION)
        , row.getAs[Byte](F_STATUS)
        , row.getAs[String](F_DEVICE_ID)
        , row.getAs[String](F_DEVICE_CLASS)
        , row.getAs[String](F_CREATE_TIME)
        , row.getAs[String](F_UPDATE_TIME)
      )
    }
  }
}