package com.shujia.mrjq

import org.apache.spark.internal.Logging
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.slf4j.Logger

object DwsAppVioZqzf extends Logging {

  val logger: Logger = log

  // Violation-behaviour categories. "zd" is the overall total (a plain row
  // count); every other key is resolved to a code list via ZqzfUtil.getCode.
  // The order of this list fixes the column order of the output table.
  private val wfxwList: List[String] =
    List("zd", "zjjs", "yjjs", "zdhp", "wzbz", "tpjp", "wdtk", "jspz", "wxjs", "cy", "cz")

  /**
   * Entry point. Expects exactly one argument: the partition date (ds) to
   * process, e.g. 2023-06-14. Logs an error and returns when it is missing.
   */
  def main(args: Array[String]): Unit =
    args.headOption match {
      case Some(ds) => run(ds)
      case None     => logger.error("请指定一个分区日期！")
    }

  /**
   * Computes, for every violation day in partition `ds`:
   *   - dr_*_wfs : the day's violation count per category
   *   - jn_*_wfs : the total of the day's calendar year per category
   *   - *_tb     : ratio vs. the same calendar day (MM-dd) of the previous year
   * and writes the result as '^'-separated CSV into the
   * dws.dws_app_vio_zqzf partition for `ds`.
   */
  private def run(ds: String): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwsAppVioZqzf")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Implicit conversions ($-columns) and the SQL function package.
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // Surveillance violations: already one behaviour code per row.
      val dwdBaseVioSurveilDF: DataFrame = spark.table("dwd.dwd_base_vio_surveil")
        .where($"ds" === ds)
        .select($"wfsj", $"wfxw")

      // On-site enforcement: up to five behaviour columns, exploded to rows.
      val dwdBaseVioForceDF: DataFrame = spark.table("dwd.dwd_base_vio_force")
        .where($"ds" === ds)
        .select($"wfsj", explode(array($"wfxw1", $"wfxw2", $"wfxw3", $"wfxw4", $"wfxw5")) as "wfxw")

      // Per-category daily counters. "zd" is handled separately as a plain
      // count; the rest test membership in the category's code list.
      val categoryAggs = wfxwList.tail.map { w =>
        sum(when($"wfxw".isInCollection(ZqzfUtil.getCode(w)), 1).otherwise(0)) as s"dr_${w}_wfs"
      }

      // union keeps duplicates on purpose (unionAll semantics): both sources
      // may legitimately report identical (time, behaviour) rows.
      val dailyDF: DataFrame = dwdBaseVioForceDF
        .union(dwdBaseVioSurveilDF)
        .withColumn("wfsj_day", date_format($"wfsj", "yyyy-MM-dd"))
        .where($"wfxw".isNotNull and $"wfsj_day".isNotNull)
        .groupBy($"wfsj_day")
        .agg(count($"wfxw") as "dr_zd_wfs", categoryAggs: _*)
        .withColumn("year", year($"wfsj_day"))

      // Year totals and year-over-year ratios: one trio of columns per category.
      val baseDF: DataFrame = wfxwList.foldLeft(dailyDF) { (df, w) =>
        df
          // total of the day's calendar year
          .withColumn(s"jn_${w}_wfs", sum($"dr_${w}_wfs") over Window.partitionBy($"year"))
          // count of the same calendar day (MM-dd) in the previous year
          .withColumn(s"qn_dr_${w}_wfs",
            lag($"dr_${w}_wfs", 1) over Window.partitionBy(date_format($"wfsj_day", "MM-dd")).orderBy($"year"))
          // YoY ratio, 4 decimals; null (no previous year, or division by
          // zero yielding null) is coalesced to 0
          .withColumn(s"${w}_tb", round(coalesce($"dr_${w}_wfs" / $"qn_dr_${w}_wfs" - 1, expr("0")), 4))
      }

      // Final layout: wfsj followed by (dr, jn, tb) for each category in order.
      val resultCols = ($"wfsj_day" as "wfsj") +: wfxwList.flatMap { w =>
        Seq(col(s"dr_${w}_wfs"), col(s"jn_${w}_wfs"), col(s"${w}_tb"))
      }
      val resultDF: DataFrame = baseDF.select(resultCols: _*)

      // Register the partition in the metastore, then write straight to its
      // HDFS path (external-table pattern).
      spark.sql(s"alter table dws.dws_app_vio_zqzf add if not exists partition(ds='$ds')")
      resultDF
        .write
        .mode(SaveMode.Overwrite)
        .format("csv")
        .option("sep", "^")
        .save(s"/daas/motl/dws/dws_app_vio_zqzf/ds=$ds")
    } finally {
      // Release the YARN application even when the job fails.
      spark.stop()
    }
  }

  /**
   * Submit command:
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.mrjq.DwsAppVioZqzf car-1.0.jar 2023-06-14
   */

}
