package com.shujia.batch

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * Batch job: builds the DWS-layer traffic-violation law-enforcement metrics
 * (dws.dws_app_vio_zqzf) for one partition day.
 *
 * Pipeline: union on-site and off-site violation records from the DWD layer,
 * aggregate daily counts per metric, derive year-total / year-over-year
 * columns with window functions, then write the result as a CSV partition
 * and register it with the Hive metastore.
 */
object DwsAppVioZqzf {

  /** Metric keys, in the exact order the output columns must appear.
    * "zd" (total) has no code list — its daily value is a plain row count. */
  private val indexNames: List[String] =
    List("zd", "zjjs", "yjjs", "zdhp", "wzbz", "tpjp", "wdtk", "jspz", "wxjs", "cy", "cz")

  def main(args: Array[String]): Unit = {

    // Partition date supplied by the scheduler (e.g. "20240101").
    require(args.nonEmpty, "usage: DwsAppVioZqzf <ds>")
    val ds: String = args.head

    // Create the Spark SQL session with Hive metastore support enabled.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwsAppVioZqzf")
      .enableHiveSupport()
      .getOrCreate()

    try {
      import spark.implicits._
      import org.apache.spark.sql.functions._

      // 1. Off-site (camera-captured) violations: keep violation day and code.
      val vioSurveil: DataFrame = spark
        .table("dwd.dwd_base_vio_surveil")
        .where($"ds" === ds)
        .select(date_format($"wfsj", "yyyy-MM-dd") as "wfsj", $"wfxw")

      // 2. On-site violations (one record may carry up to five codes).
      val vioForce: DataFrame = spark
        .table("dwd.dwd_base_vio_force")
        .where($"ds" === ds)

      val unionDF: DataFrame = vioForce
        // Explode the five violation-code columns into one row per code.
        .select($"wfsj", explode(array($"wfxw1", $"wfxw2", $"wfxw3", $"wfxw4", $"wfxw5")) as "wfxw")
        // Strip the trailing upper-case marker letter from the code.
        .withColumn("wfxw", regexp_replace($"wfxw", "[A-Z]$", ""))
        .select(date_format($"wfsj", "yyyy-MM-dd") as "wfsj", $"wfxw")
        // Merge on-site and off-site violations into one dataset.
        .union(vioSurveil.select($"wfsj", $"wfxw"))

      // Metric definitions: metric key -> list of violation codes it covers.
      val indexs: Map[String, List[String]] = ZqzfIndexUtils.getIndexs

      // 1 when the row's violation code belongs to the metric, else 0.
      def metricFlag(metric: String) =
        sum(when($"wfxw".isInCollection(indexs(metric)), 1).otherwise(0))

      // Daily counts: total rows ("zd") plus one flagged sum per metric.
      val dailyAggs =
        (count($"wfsj") as "dr_zd_wfs") +:
          indexNames.tail.map(m => metricFlag(m) as s"dr_${m}_wfs")

      val dailyDF: DataFrame = unionDF
        .groupBy($"wfsj")
        .agg(dailyAggs.head, dailyAggs.tail: _*)

      // Derive year-total / year-over-year columns for every metric.
      val baseDF: DataFrame =
        indexNames.foldLeft(dailyDF)((df, name) => comIndex(df, spark, name))

      // Final column order: wfsj, then per metric dr_/jn_/_tb/_tbbj.
      val resultCols = col("wfsj") +: indexNames.flatMap(m =>
        Seq(col(s"dr_${m}_wfs"), col(s"jn_${m}_wfs"), col(s"${m}_tb"), col(s"${m}_tbbj")))

      val resultDF: DataFrame = baseDF.select(resultCols: _*)

      // Write the partition as '^'-separated CSV under the DWS path.
      resultDF.write
        .format("csv")
        .option("sep", "^")
        .mode(SaveMode.Overwrite)
        .save(s"/daas/motl/dws/dws_app_vio_zqzf/ds=$ds")

      // Register the new partition with the Hive metastore.
      spark.sql(
        s"""
           |alter table dws.dws_app_vio_zqzf add IF NOT EXISTS  partition  (ds='$ds')
           |
           |""".stripMargin)
    } finally {
      // Release cluster resources even if the job fails.
      spark.stop()
    }
  }

  /**
    * Adds the derived columns for one metric to the daily-count frame.
    *
    * Input must contain "wfsj" (yyyy-MM-dd string) and s"dr_${indexName}_wfs".
    * Adds:
    *  - s"jn_${indexName}_wfs": total for the whole year of the row's date
    *    (unbounded sum over the year partition, not a running year-to-date).
    *  - s"qu_dr_${indexName}_wfs": same calendar day (MM-dd) of the previous
    *    year present in the data. NOTE(review): `lag` picks the nearest
    *    earlier year in the partition — if a year is missing this is not
    *    strictly "last year"; confirm the data always covers adjacent years.
    *  - s"${indexName}_tb": year-over-year change in percent, 2 decimals.
    *  - s"${indexName}_tbbj": "上升" when today exceeds last year, else "下降".
    *
    * @param df        daily-count frame produced by the groupBy/agg step
    * @param spark     active session (needed for implicits)
    * @param indexName metric key, e.g. "zd", "zjjs"
    * @return df with the four derived columns appended
    */
  def comIndex(df: DataFrame, spark: SparkSession, indexName: String): DataFrame = {

    import spark.implicits._
    import org.apache.spark.sql.functions._

    val yearTotalWin = Window.partitionBy(year($"wfsj"))
    val sameDayWin   = Window.partitionBy(date_format($"wfsj", "MM-dd")).orderBy(year($"wfsj"))

    df
      // Total violations for the row's year.
      .withColumn(s"jn_${indexName}_wfs", sum($"dr_${indexName}_wfs") over yearTotalWin)
      // Same day of the previous year in the data.
      .withColumn(s"qu_dr_${indexName}_wfs", lag($"dr_${indexName}_wfs", 1) over sameDayWin)
      // Year-over-year percentage change, rounded to 2 decimals.
      .withColumn(s"${indexName}_tb", round(($"dr_${indexName}_wfs" - $"qu_dr_${indexName}_wfs") / $"qu_dr_${indexName}_wfs" * 100, 2))
      // Direction marker: "上升" = up, "下降" = down (also when equal or no prior year).
      .withColumn(s"${indexName}_tbbj", when($"dr_${indexName}_wfs" > $"qu_dr_${indexName}_wfs", "上升").otherwise("下降"))
  }
}
