package com.shujia.ycsb

import org.apache.spark.internal.Logging
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{Dataset, Row, SaveMode, SparkSession}
import org.slf4j.Logger

object DwsAppVioWfjltjMaxsb extends Logging {
  val logger: Logger = log

  /**
   * Detects surveillance devices (`sbbh`) whose daily violation-report counts are
   * statistical outliers, then writes the flagged device/day rows to the
   * `dws.dws_app_vio_wfjltj_maxsb` partition for the given day.
   *
   * Outlier rule: per device, daily counts are ranked; Q1/Q3 are picked by rank
   * position and the upper fence is Q3 + 1.5 * (Q3 - Q1) (the IQR rule). Days
   * whose count exceeds the fence (and is > 2) are kept.
   *
   * @param args args(0) is the partition date (`ds`) to process; required.
   */
  def main(args: Array[String]): Unit = {

    if (args.isEmpty) {
      // No partition date supplied — nothing to process.
      logger.error("请指定分区日期")
    } else {
      val dayId: String = args.head

      val spark: SparkSession = SparkSession
        .builder()
        .appName("DwsAppVioWfjltjMaxsb")
        .enableHiveSupport()
        .getOrCreate()

      try {
        import spark.implicits._
        import org.apache.spark.sql.functions._

        // Load the non-scene (automated surveillance) violation fact table for the target day.
        val dwdBaseVioSurveilDF: Dataset[Row] = spark.table("dwd.dwd_base_vio_surveil").where($"ds" === dayId)

        // Count the violations each device reports per calendar day.
        dwdBaseVioSurveilDF
          .where($"sbbh".isNotNull and $"sbbh" =!= "")
          .groupBy($"sbbh", date_format($"wfsj", "yyyy-MM-dd") as "wfsj")
          .agg(count("*") as "sbwfs_day")
          // Total number of days with records per device.
          .withColumn("total_cnt", count("*") over Window.partitionBy($"sbbh"))
          // Rank positions corresponding to Q1 and Q3.
          .withColumn("q1_pos", ceil($"total_cnt" / 4))
          .withColumn("q3_pos", floor($"total_cnt" * 3 / 4))
          // Rank each device's days by ascending daily violation count.
          .withColumn("rn", row_number() over Window.partitionBy($"sbbh").orderBy($"sbwfs_day"))
          // Pick the daily counts sitting at the Q1/Q3 rank positions
          // (the sum collapses to the single matching row's value, 0 if the position is out of range).
          .withColumn("q1_value", sum(when($"q1_pos" === $"rn", $"sbwfs_day").otherwise(0)) over Window.partitionBy($"sbbh"))
          .withColumn("q3_value", sum(when($"q3_pos" === $"rn", $"sbwfs_day").otherwise(0)) over Window.partitionBy($"sbbh"))
          // Upper fence (Q3 + 1.5 * IQR); keep only days exceeding it.
          // lit(1.5) instead of expr("1.5"): same constant, no SQL parsing at runtime.
          .withColumn("max_value", $"q3_value" + lit(1.5) * ($"q3_value" - $"q1_value"))
          .where($"sbwfs_day" > $"max_value" and $"sbwfs_day" > 2)
          .select(
            $"sbbh"
            , $"sbwfs_day"
            , $"max_value" as "zdgjz"
            , $"wfsj" as "wfsj_day"
          )
          .write
          .mode(SaveMode.Overwrite)
          .format("csv")
          .option("sep", "^")
          .save(s"/daas/motl/dws/dws_app_vio_wfjltj_maxsb/ds=$dayId")

        // Register the freshly written partition with the Hive metastore.
        spark.sql(
          s"""
             |alter table dws.dws_app_vio_wfjltj_maxsb add if not exists partition(ds='$dayId')
             |""".stripMargin)
      } finally {
        // Always release driver/executor resources, even when the job fails.
        spark.stop()
      }
    }
  }
}
