package com.shujia.mrjq

import org.apache.spark.internal.Logging
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, Dataset, Row, SaveMode, SparkSession}
import org.slf4j.Logger

object DwsAppAcdMrjq extends Logging {
  val logger: Logger = log

  /**
   * Daily traffic-accident summary job (每日交警事故汇总).
   *
   * Entry point: validates the CLI argument, creates the Hive-enabled
   * SparkSession, delegates to [[run]], and guarantees the session is
   * stopped even if the job fails.
   *
   * @param args args(0) must be the partition date (`ds`) to process.
   */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      logger.error("请指定分区日期")
      return
    }
    val dayId: String = args.head

    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwsAppAcdMrjq")
      .enableHiveSupport()
      .getOrCreate()

    try {
      run(spark, dayId)
    } finally {
      // Always release cluster resources, even when the job throws.
      spark.stop()
    }
  }

  /**
   * Core ETL: reads partition `ds=dayId` of `dwd.dwd_base_acd_file`,
   * aggregates per-day accident metrics, derives yearly totals (jn_*),
   * prior-year same-day values (qn_dr_*) and year-over-year ratios (tb_*),
   * writes a '^'-separated CSV to the DWS path, and registers the Hive
   * partition.
   */
  private def run(spark: SparkSession, dayId: String): Unit = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // Accident fact table, restricted to the requested partition.
    val dwdBaseAcdFile: Dataset[Row] = spark.table("dwd.dwd_base_acd_file").where($"ds" === dayId)

    // Per-day (tjrq) base metrics:
    //   dr_sgs    - distinct accident count
    //   dr_swsgs  - accidents with at least one death within 30 days (swrs30)
    //   dr_swrs   - deaths within 24 hours
    //   dr_ssrs   - injuries within 24 hours
    //   dr_zjccss - direct property loss
    val baseDF: DataFrame = dwdBaseAcdFile
      .groupBy(date_format($"sgfssj", "yyyy-MM-dd") as "tjrq")
      .agg(
        countDistinct($"sgbh") as "dr_sgs"
        , sum(when($"swrs30" > 0, 1).otherwise(0)) as "dr_swsgs"
        , sum($"swrs24") as "dr_swrs"
        , sum($"ssrs24") as "dr_ssrs"
        , sum($"zjccss") as "dr_zjccss"
      )

    val indexes = List("sgs", "swsgs", "swrs", "ssrs", "zjccss")

    // Windows are identical for every metric; build them once.
    val yearWindow = Window.partitionBy(year($"tjrq"))
    // NOTE(review): yearWindow has no orderBy/frame, so jn_* is the FULL
    // calendar-year total on every row, not a running year-to-date sum —
    // confirm this is intended.
    val sameDayAcrossYears = Window.partitionBy(date_format($"tjrq", "MM-dd")).orderBy(year($"tjrq"))

    // For each metric derive:
    //   jn_*    - yearly total
    //   qn_dr_* - value on the same calendar day (MM-dd) of the previous year
    //   tb_*    - year-over-year ratio, defaulting to 1.0 when there is no
    //             prior-year row (first year in the data)
    val resultDF: DataFrame = indexes.foldLeft(baseDF) { (df, index) =>
      df
        .withColumn(s"jn_$index", sum($"dr_$index") over yearWindow)
        .withColumn(s"qn_dr_$index", lag($"dr_$index", 1) over sameDayAcrossYears)
        .withColumn(s"tb_$index", round(coalesce($"dr_$index" / $"qn_dr_$index" - 1, lit(1.0)), 4))
    }

    // Output column order: tjrq first, then (dr, jn, tb) per metric —
    // derived from `indexes` so the two lists can never drift apart.
    val outputCols = col("tjrq") :: indexes.flatMap { index =>
      List(col(s"dr_$index"), col(s"jn_$index"), col(s"tb_$index"))
    }

    resultDF
      .select(outputCols: _*)
      .write
      .mode(SaveMode.Overwrite)
      .format("csv")
      .option("sep", "^")
      .save(s"/daas/motl/dws/dws_app_acd_mrjq/ds=$dayId")

    // Make the freshly written directory visible to Hive as a partition.
    spark.sql(
      s"""
        |alter table dws.dws_app_acd_mrjq add if not exists partition(ds='$dayId')
        |""".stripMargin)
  }
}
