package cn.ipanel.bigdata.job.dim.hive

import cn.ipanel.bigdata.boot.Job
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.dw.dim.hive.{T_DEVICE_DAILY_STATUS => T}
import cn.ipanel.bigdata.dw.ods.{T_REPORT => S}
import cn.ipanel.bigdata.utils.Dictionary.{F_DEVICE_ID, F_REGION, F_STATUS}
import cn.ipanel.bigdata.utils.Dictionary.Service.{SERVICE_OFFLINE, SERVICE_ONLINE}
import cn.ipanel.bigdata.utils.Dictionary.DeviceStat._
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.{col, first, from_unixtime, last, lit, max, row_number, when}

/**
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_panyu
 * @date 2023/12/13 10:05
 * @description Device online status, persisted per day. A device's status carries
 *              over across day boundaries, so today's status must be derived in
 *              combination with the previous day's saved status.
 */
class DeviceStatusByDay extends Job {

  /**
   * Computes each device's end-of-day online/offline status from today's
   * online/offline service reports, then merges it with yesterday's persisted
   * status so that devices with no reports today retain their previous state.
   *
   * Output is written via `T.save` with one row per device containing region,
   * status, last-online time, last-offline time and the report date.
   */
  override def onStartup(): Unit = {
    // Working column names used only within this computation.
    val NOW_STATUS = "new_status"
    val LAST_STATUS_TIMESTAMP = "last_status_timestamp"
    val LAST_ONLINE_TIMESTAMP = "last_online_timestamp"
    val LAST_OFFLINE_TIMESTAMP = "last_offline_timestamp"
    // Yesterday's persisted per-device status (may be empty on first run).
    val lastDayDf = T.find(_DAY.prev())
      .select(F_DEVICE_ID, F_REGION, F_STATUS, T.F_LAST_ONLINE_TIME, T.F_LAST_OFFLINE_TIME)
    // Today's raw reports, restricted to online/offline service events and
    // mapped to a normalized ONLINE/OFFLINE status column.
    val reportDf = S.find(_DAY)
      .filter(col(S.F_SERVICE).isin(SERVICE_ONLINE, SERVICE_OFFLINE))
      .drop(S.F_EXTRA, S.F_DATE_TIME)
      .withColumn(NOW_STATUS, when(col(S.F_SERVICE) === lit(SERVICE_ONLINE), lit(ONLINE)).otherwise(lit(OFFLINE)))
      // For each of the two service types, capture the last online and last offline timestamps.
      // The full-frame window (unboundedPreceding..unboundedFollowing) ordered by timestamp makes
      // `last(...)` yield the latest non-null timestamp per (device, region, status) — constant
      // across every row of that partition.
      .withColumn(LAST_STATUS_TIMESTAMP, last(S.F_TIMESTAMP, ignoreNulls = true) over Window.partitionBy(F_DEVICE_ID, F_REGION, NOW_STATUS).orderBy(S.F_TIMESTAMP).rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing))
      .groupBy(F_DEVICE_ID, F_REGION)
      .agg(
        // LAST_STATUS_TIMESTAMP is window-constant per status, so `first` with ignoreNulls
        // simply extracts the per-status value (null if the device had no such events today).
        first(when(col(NOW_STATUS) === lit(ONLINE), col(LAST_STATUS_TIMESTAMP)).otherwise(null), ignoreNulls = true) as LAST_ONLINE_TIMESTAMP,
        first(when(col(NOW_STATUS) === lit(OFFLINE), col(LAST_STATUS_TIMESTAMP)).otherwise(null), ignoreNulls = true) as LAST_OFFLINE_TIMESTAMP
      )
      // Derive today's status: whichever event (online vs offline) happened last wins;
      // a tie (equal timestamps) resolves to ONLINE.
      .withColumn(F_STATUS, when(col(LAST_ONLINE_TIMESTAMP).isNotNull and col(LAST_OFFLINE_TIMESTAMP).isNull, lit(ONLINE))
        .when(col(LAST_ONLINE_TIMESTAMP).isNull and col(LAST_OFFLINE_TIMESTAMP).isNotNull, lit(OFFLINE))
        .when(col(LAST_ONLINE_TIMESTAMP).isNotNull and col(LAST_OFFLINE_TIMESTAMP).isNotNull and col(LAST_ONLINE_TIMESTAMP) >= col(LAST_OFFLINE_TIMESTAMP), lit(ONLINE))
        .otherwise(lit(OFFLINE)))
      // Timestamps are epoch milliseconds; from_unixtime expects seconds, hence /1000.
      .withColumn(T.F_LAST_ONLINE_TIME, from_unixtime(col(LAST_ONLINE_TIMESTAMP)/1000))
      .withColumn(T.F_LAST_OFFLINE_TIME, from_unixtime(col(LAST_OFFLINE_TIMESTAMP)/1000))

    // Yesterday's accumulated data is non-empty, so the historical data must be merged with today's.
    if (!lastDayDf.isEmpty) {
      Logger.I("在线历史数据不为空，合并(merge)数据...")
      import spark.implicits._
      // Full outer join: devices seen only today come from `a`, devices with no
      // reports today carry yesterday's values from `b`; today's values take
      // precedence whenever present (coalesce via `if(... is not null ...)`).
      // NOTE(review): the join matches on device id only, not region — if a
      // device can appear under more than one region this would multiply rows;
      // confirm region is unique per device.
      T.save(
        reportDf.select(F_DEVICE_ID, F_REGION, F_STATUS, T.F_LAST_ONLINE_TIME, T.F_LAST_OFFLINE_TIME)
          .alias("a")
          .join(lastDayDf.alias("b"), $"a.$F_DEVICE_ID" === $"b.$F_DEVICE_ID", "outer")
          .selectExpr(
            s"if(a.$F_DEVICE_ID is not null, a.$F_DEVICE_ID, b.$F_DEVICE_ID) as $F_DEVICE_ID",
            s"if(a.$F_REGION is not null, a.$F_REGION, b.$F_REGION) as $F_REGION",
            s"if(a.$F_STATUS is not null, a.$F_STATUS, b.$F_STATUS) as $F_STATUS",
            s"if(a.${T.F_LAST_ONLINE_TIME} is not null, a.${T.F_LAST_ONLINE_TIME}, b.${T.F_LAST_ONLINE_TIME}) as ${T.F_LAST_ONLINE_TIME}",
            s"if(a.${T.F_LAST_OFFLINE_TIME} is not null, a.${T.F_LAST_OFFLINE_TIME}, b.${T.F_LAST_OFFLINE_TIME}) as ${T.F_LAST_OFFLINE_TIME}",
            s"${_DAY.toDate} as ${T.F_DATE}"
          )
      )
    } else {
      Logger.I("在线历史数据为空，使用今天数据初始化(init)...")
      // Initialize the accumulated data from today's reports alone (first run /
      // no history for the previous day).
      T.save(
        reportDf.withColumn(T.F_DATE, lit(_DAY.toDate))
          .select(T.getTBColumns.head, T.getTBColumns.tail: _*)
      )
    }


  }

}