package com.shujia.mrjq

import org.apache.spark.internal.Logging
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.slf4j.Logger

object DwsAppAcdMrjq extends Logging {
  val logger: Logger = log

  /**
   * Daily traffic-accident summary job (dws.dws_app_acd_mrjq).
   *
   * Reads the accident fact table `dwd.dwd_base_acd_file` for one partition date,
   * aggregates per-day metrics, enriches them with year-to-date totals and
   * year-over-year ratios via window functions, and writes the result as a
   * '^'-separated CSV into the matching Hive partition directory.
   *
   * @param args args(0) is the partition date `ds` (format yyyy-MM-dd); required.
   */
  def main(args: Array[String]): Unit = {
    // The partition date is mandatory — abort early with a log message if missing.
    if (args.isEmpty) {
      logger.error("请指定一个分区日期！")
      return
    }
    val ds: String = args.head

    // Build the SparkSession with Hive support so Hive tables can be read/written.
    // Fixed: appName was "DwsAppVioZqzf", copy-pasted from another job; it now
    // matches this class so the job is identifiable in the YARN/Spark UI.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwsAppAcdMrjq")
      .enableHiveSupport()
      .getOrCreate()

    // Import implicit conversions ($-column syntax) and the SQL function package.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Load the accident fact table, restricted to the requested partition.
    val dwdBaseAcdFile: DataFrame = spark.table("dwd.dwd_base_acd_file").where($"ds" === ds)

    // Per-day aggregation using the DataFrame DSL.
    val baseDF: DataFrame = dwdBaseAcdFile
      // Derive the accident date (tjrq) from the accident timestamp (sgfssj).
      .withColumn("tjrq", date_format($"sgfssj", "yyyy-MM-dd"))
      .groupBy($"tjrq")
      .agg(
        countDistinct($"sgbh") as "dr_sgs",                     // accidents that day
        sum(when($"swrs30" > 0, 1).otherwise(0)) as "dr_swsgs", // fatal accidents (>=1 death within 30 days)
        sum($"swrs30") as "dr_swrs",                            // deaths within 30 days
        sum($"ssrs30") as "dr_ssrs",                            // injuries within 30 days
        sum($"zjccss") as "dr_zjccss"                           // direct property loss
      )
      .withColumn("year", year($"tjrq"))

    // The five metric suffixes; each gets the same set of derived columns.
    val indexList: List[String] = List("sgs", "swsgs", "swrs", "ssrs", "zjccss")

    // For each metric derive:
    //   jn_<m>    — year-to-date total (window over the year),
    //   qn_dr_<m> — previous year's value for the same calendar day (lag over MM-dd),
    //   tb_<m>    — year-over-year ratio, 0 when there is no prior-year row.
    // foldLeft replaces the original `var` + for-loop mutation.
    val enrichedDF: DataFrame = indexList.foldLeft(baseDF) { (df, index) =>
      df
        .withColumn(s"jn_$index", sum($"dr_$index") over Window.partitionBy($"year"))
        .withColumn(s"qn_dr_$index", lag($"dr_$index", 1) over Window.partitionBy(date_format($"tjrq", "MM-dd")).orderBy($"year"))
        .withColumn(s"tb_$index", round(coalesce($"dr_$index" / $"qn_dr_$index" - 1, lit(0)), 4))
    }

    // Final projection: tjrq followed by (dr_, jn_, tb_) per metric, generated
    // from indexList so the column set cannot drift from the loop above.
    val outputColumns = col("tjrq") +: indexList.flatMap { index =>
      Seq(col(s"dr_$index"), col(s"jn_$index"), col(s"tb_$index"))
    }
    val resultDF: DataFrame = enrichedDF.select(outputColumns: _*)

    // Register the partition in the Hive metastore (idempotent).
    spark.sql(s"alter table dws.dws_app_acd_mrjq add if not exists partition(ds='$ds')")

    // Write the result directly into the partition directory as '^'-separated CSV.
    resultDF
      .write
      .mode(SaveMode.Overwrite)
      .format("csv")
      .option("sep", "^")
      .save(s"/daas/motl/dws/dws_app_acd_mrjq/ds=$ds")
  }

  /**
   * Submit command:
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.mrjq.DwsAppAcdMrjq car-1.0.jar 2023-06-14
   */

}
