package cn.ipanel.bigdata.job.basic

import cn.ipanel.bigdata.boot.Job
import cn.ipanel.bigdata.boot.logger.Logger
import cn.ipanel.bigdata.dw.dim.phoenix.{T_HOME => H}
import cn.ipanel.bigdata.dw.ods.{T_REPORT => R}
import cn.ipanel.bigdata.utils.Dictionary._
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

/**
 * @author lzz
 * @environment IntelliJ IDEA 2020.3.1
 * @projectName bigdata_panyu
 * @date 2023/11/25 10:44
 * @description: 底量家庭表，包含了新增，删除，修改的家庭
 */
class HomeDetail extends Job {

  /**
   * Rebuilds the full home snapshot table by merging today's incremental
   * home events (add / modify / delete) into the existing base table.
   */
  override def onStartup(): Unit = {
    // Today's home events: parse the extra map into typed columns, drop
    // malformed rows, and keep only the latest event per home.
    val df = R.find(_DAY)
      .filter(col(R.F_SERVICE) === lit(Service.SERVICE_HOME))
      .withColumn("unix_time", Functions.func_mapToL(col(R.F_EXTRA), lit(Home.FIELD_STATUS_MODIFY_TIME), lit(0)))
      .withColumn(H.F_HOME_ID, Functions.func_mapToS(col(R.F_EXTRA), lit(Home.FIELD_HOME_ID), lit(UNKNOWN)))
      .withColumn(F_STATUS, Functions.func_mapToI(col(R.F_EXTRA), lit(Device.FIELD_ACTION), lit(BusState.INVALID)))
      .withColumn(H.F_REGION, Functions.func_mapToL(col(R.F_EXTRA), lit(Device.FIELD_AREA_CODE), lit(INVALID_LONG)))
      .filter(col(F_STATUS) =!= lit(BusState.INVALID) and col(H.F_HOME_ID) =!= lit(UNKNOWN) and col(H.F_REGION) =!= lit(INVALID_LONG))
      .withColumn(H.F_UPDATE_TIME, when(col(F_STATUS) =!= lit(BusState.ADD), from_unixtime(col("unix_time"))).otherwise(null))
      .withColumn(H.F_CREATE_TIME, when(col(F_STATUS) === lit(BusState.ADD), from_unixtime(col("unix_time"))).otherwise(null))
      // A home may be edited several times in one day; keep the most recent event only.
      .withColumn("rank_num", row_number() over Window.partitionBy(H.F_HOME_ID).orderBy(col(R.F_TIMESTAMP).desc))
      .filter(col("rank_num") === lit(1))
      .select(H.getTBColumns.head, H.getTBColumns.tail: _*)

    df.persist()
    // Materialize once and reuse the count; calling df.isEmpty afterwards
    // would trigger a second Spark job for information we already have.
    val newHomeCount = df.count()
    Logger.I("new home from report count: " + newHomeCount)
    Logger.I("new home from report details: ")
    df.show(false)

    // Guarded block instead of an early `return` (Scala discourages `return`).
    if (newHomeCount > 0) {
      import spark.implicits._
      // Base snapshot; homes already marked deleted stay untouched.
      val df0 = H.load.filter(col(F_STATUS) =!= lit(BusState.DEL))
        .select(H.getTBColumns.head, H.getTBColumns.tail: _*)
      df0.persist()
      df0.show(5, false)

      val res = df0
        .union(
          // For newly added homes, create time and update time are identical.
          df.filter(col(F_STATUS) === lit(BusState.ADD))
            .withColumn(H.F_UPDATE_TIME, col(H.F_CREATE_TIME))
        )
        .alias("a")
        .join(
          // Modified / deleted homes must already exist in the base table,
          // so a left join is sufficient.
          df.filter(col(F_STATUS) =!= lit(BusState.ADD)).alias("b")
          , $"a.${H.F_HOME_ID}" === $"b.${H.F_HOME_ID}"
          , "left"
        )
        .selectExpr(
          // After the left join, null b-side columns mean the row was not
          // touched by today's modify/delete events; keep the a-side values.
          s"if(b.${H.F_REGION} is null, a.${H.F_REGION}, b.${H.F_REGION}) as ${H.F_REGION}",
          s"if(b.$F_STATUS is null, a.$F_STATUS, b.$F_STATUS) as ${F_STATUS}",
          s"a.${H.F_HOME_ID}",
          // f_create_time must never change on edit or delete.
          s"a.${H.F_CREATE_TIME}",
          s"if(b.${H.F_UPDATE_TIME} is null, a.${H.F_UPDATE_TIME},b.${H.F_UPDATE_TIME}) as ${H.F_UPDATE_TIME}"
        )
      H.save(res)
      // Release the base-table cache once the save action has run.
      df0.unpersist()
    }
    // Release today's events cache in every path (previously leaked).
    df.unpersist()
  }

}