package com.shujia.mrjq

import org.apache.spark.internal.Logging
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.slf4j.Logger

object DwsAppVioZqzf extends Logging {
  val logger: Logger = log

  // Violation categories counted per day. "zd" (the overall total) is handled
  // separately as a plain row count; these map to keys of ZqzfUtil.getKJ.
  private val Categories: List[String] =
    List("zjjs", "yjjs", "zdhp", "wzbz", "tpjp", "wdtk", "jspz", "wxjs", "cy", "cz")

  /**
   * Entry point for the dws.dws_app_vio_zqzf build.
   *
   * @param args args(0) is the Hive partition date (ds) to process; the job
   *             logs an error and exits if it is missing.
   */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      // Cannot run without a target partition date.
      logger.error("请指定分区日期")
    } else {
      run(args.head)
    }
  }

  /**
   * Builds the aggregate for one partition day: reads the off-site (surveil)
   * and on-site (force) violation fact tables, counts violations per day
   * overall and per category, derives year-to-date totals and year-over-year
   * ratios with window functions, and overwrites the target partition.
   */
  private def run(dayId: String): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwsAppVioZqzf")
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Load the off-site and on-site violation fact tables for this partition.
    val dwdBaseVioSurveil: DataFrame = spark.table("dwd.dwd_base_vio_surveil").where($"ds" === dayId)
    val dwdBaseVioForce: DataFrame = spark.table("dwd.dwd_base_vio_force").where($"ds" === dayId)

    // Off-site table: one violation code per row. Normalize the timestamp to
    // a calendar day and strip a trailing letter suffix from the code so it
    // matches the mapping keys.
    val dwdBaseVioSurveilWfxw: DataFrame = dwdBaseVioSurveil
      .select(
        date_format($"wfsj", "yyyy-MM-dd") as "wfsj",
        regexp_replace($"wfxw", "[a-zA-Z]$", "") as "wfxw"
      )

    // On-site table: up to five violation codes per row (wfxw1..wfxw5).
    // Explode them into one row per code, then normalize the same way.
    val dwdBaseVioForceWfxw: DataFrame = dwdBaseVioForce
      .select(
        date_format($"wfsj", "yyyy-MM-dd") as "wfsj",
        explode(array($"wfxw1", $"wfxw2", $"wfxw3", $"wfxw4", $"wfxw5")) as "wfxw"
      )
      .select(
        $"wfsj",
        regexp_replace($"wfxw", "[a-zA-Z]$", "") as "wfxw"
      )

    // Category key -> list of violation codes belonging to that category.
    val kjMap: Map[String, List[String]] = ZqzfUtil.getKJ

    // Per-day counts: one conditional count per category, generated from the
    // shared category list so the aggregation and the output schema below can
    // never drift apart.
    val categoryCounts: List[Column] = Categories.map { c =>
      sum(when($"wfxw".isInCollection(kjMap(c)), 1).otherwise(0)) as s"dr_${c}_wfs"
    }
    val dailyDF: DataFrame = dwdBaseVioSurveilWfxw
      .union(dwdBaseVioForceWfxw)
      .where($"wfsj".isNotNull)
      .groupBy($"wfsj")
      .agg(count("*") as "dr_zd_wfs", categoryCounts: _*)

    // "zd" (overall) first, then each category — this fixed order also
    // defines the output column order expected by the target table.
    val indexes: List[String] = "zd" :: Categories

    // For each index derive:
    //   jn_*_wfs    - year total: sum of daily counts over all days of the same year;
    //   qn_dr_*_wfs - previous-year same calendar day (MM-dd) via lag over year order;
    //   *_tb        - year-over-year ratio (dr/qn - 1), rounded to 4 decimals,
    //                 defaulting to 1.0 when there is no previous-year row or
    //                 the division yields null (previous-year count of 0).
    // NOTE(review): lag(1) ordered by year picks the previous *available*
    // year, not necessarily exactly last year when a year is missing from the
    // data (and "02-29" only exists in leap years) — confirm this matches the
    // intended "同期" semantics.
    val enrichedDF: DataFrame = indexes.foldLeft(dailyDF) { (df, index) =>
      df.withColumn(s"jn_${index}_wfs",
          sum($"dr_${index}_wfs") over Window.partitionBy(year($"wfsj")))
        .withColumn(s"qn_dr_${index}_wfs",
          lag($"dr_${index}_wfs", 1) over Window
            .partitionBy(date_format($"wfsj", "MM-dd"))
            .orderBy(year($"wfsj")))
        .withColumn(s"${index}_tb",
          round(coalesce($"dr_${index}_wfs" / $"qn_dr_${index}_wfs" - 1, lit(1.0)), 4))
    }

    // Output schema: wfsj, then (daily, yearly, YoY) per index, generated
    // from the same `indexes` list used above.
    val outputCols: Seq[Column] = col("wfsj") +: indexes.flatMap { index =>
      Seq(col(s"dr_${index}_wfs"), col(s"jn_${index}_wfs"), col(s"${index}_tb"))
    }
    val dwsAppVioZqzf: DataFrame = enrichedDF.select(outputCols: _*)

    dwsAppVioZqzf.createOrReplaceTempView("dws_app_vio_zqzf_tmp")

    // Overwrite exactly the processed partition of the target table.
    spark.sql(
      s"""
         |insert overwrite table dws.dws_app_vio_zqzf partition(ds='$dayId')
         |select * from dws_app_vio_zqzf_tmp
         |""".stripMargin)
  }

}
