package com.shujia.traffic.dws

import com.shujia.traffic.common.SparkUtil
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object DwsAppAcdMrjqDay extends SparkUtil {

  /**
   * Daily accident summary job (DWS layer).
   *
   * Reads the DWD-layer accident fact table for the current partition day and
   * produces, per statistic date (tjrq):
   *   - daily metrics (dr_*): accident count, fatal-accident count, deaths,
   *     injuries, direct property loss
   *   - year-to-date totals (jn_*) via a window spanning the calendar year
   *   - year-over-year ratios (tb_*) against the same month of the previous year
   * The result is written as '|'-separated CSV into the pt=<day> partition
   * directory of dws.dws_app_acd_mrjq_d.
   *
   * Submit command:
   * spark-submit --master local --conf spark.sql.shuffle.partitions=2 --class com.shujia.traffic.dws.DwsAppAcdMrjqDay traffic-1.0-SNAPSHOT.jar
   */
  override def run(): Unit = {
    val spark: SparkSession = sparkSession

    import org.apache.spark.sql.functions._
    import spark.implicits._

    // Load the DWD-layer accident fact table for the current day's partition.
    val acdDF: DataFrame = spark.table("dwd.dwd_base_acd_file_msk_d").where($"pt" === day)

    // Total deaths across all reporting windows (swrs, 24h, 3/7/30 days, other).
    // Hoisted so the same expression is used by both aggregations below.
    val deathCol = $"swrs" + $"swrs24" + $"swrs3" + $"swrs7" + $"swrs30" + $"swrsq"
    // Total injuries (immediate + within 24h).
    val injuryCol = $"ssrs" + $"ssrs24"

    // Per-day aggregates: accidents, fatal accidents, deaths, injuries, property loss.
    val dailyDF: DataFrame = acdDF
      .groupBy(substring($"sgfssj", 1, 10) as "tjrq")
      .agg(
        countDistinct($"sgbh") as "dr_sgs"
        , sum(when(deathCol > 0, 1).otherwise(0)) as "dr_swsgs"
        , sum(deathCol) as "dr_swrs"
        , sum(injuryCol) as "dr_ssrs"
        , sum($"zjccss") as "dr_zjccss"
      )

    // Per-month aggregates, used as the basis for the year-over-year comparison.
    val monthlyDF: DataFrame = acdDF
      .groupBy(substring($"sgfssj", 1, 7) as "tjny")
      .agg(
        countDistinct($"sgbh") as "dy_sgs"
        , sum(when(deathCol > 0, 1).otherwise(0)) as "dy_swsgs"
        , sum(deathCol) as "dy_swrs"
        , sum(injuryCol) as "dy_ssrs"
        , sum($"zjccss") as "dy_zjccss"
      )

    // Metrics for which derived (year-to-date / year-over-year) columns are built.
    val indexList: List[String] = List("sgs", "swsgs", "swrs", "ssrs", "zjccss")

    // Year-to-date totals: sum each daily metric over a window covering the
    // whole calendar year. foldLeft replaces the previous var + foreach mutation.
    val t1DF: DataFrame = indexList.foldLeft(dailyDF) { (df, index) =>
      df.withColumn(s"jn_$index", sum($"dr_$index") over Window.partitionBy(year($"tjrq")))
    }

    // Year-over-year: within the same calendar month, lag the monthly value by
    // one year, then express the relative change as a percentage string.
    // coalesce defaults to 0 when no previous-year value exists (lag is null).
    val grpDyCntDF: DataFrame = indexList.foldLeft(monthlyDF) { (df, index) =>
      df
        .withColumn(s"last_dy_$index",
          lag($"dy_$index", 1) over Window.partitionBy(month($"tjny")).orderBy(year($"tjny")))
        // lit() is used instead of expr() string parsing for the plain literals.
        .withColumn(s"tb_$index",
          concat(round(coalesce($"dy_$index" / $"last_dy_$index" - 1, lit(0)) * 100, 2), lit("%")))
    }

    // Join daily and monthly metrics on year-month, then order the output as
    // tjrq followed by (dr_, jn_, tb_) for each metric — derived from indexList
    // so the column order always matches the metric list.
    val outputCols = $"tjrq" +: indexList.flatMap(i => Seq($"dr_$i", $"jn_$i", $"tb_$i"))
    val resDF: DataFrame = t1DF
      .join(grpDyCntDF, substring($"tjrq", 1, 7) === $"tjny", "left")
      .select(outputCols: _*)

    // Register the target partition in the metastore before writing the files.
    spark.sql(
      s"""
        |alter table dws.dws_app_acd_mrjq_d add if not exists partition (pt = '$day')
        |""".stripMargin)

    // Write '|'-separated CSV directly into the partition directory.
    // save() only creates the directory; the partition itself was added above.
    resDF
      .write
      .format("csv")
      .option("sep", "|")
      .mode(SaveMode.Overwrite)
      .save(s"/project/traffic/dws/dws_app_acd_mrjq_d/pt=$day")

  }
}
