package com.shujia.ycsb

import org.apache.spark.internal.Logging
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.expressions.Window
import org.slf4j.Logger

object DwsAppVioWsjtj extends Logging {

  val logger: Logger = log

  /**
   * Entry point. For each surveillance device (sbbh) in dwd.dwd_base_vio_surveil,
   * computes the gaps (in whole days) between consecutive days that have violation
   * data, flags gaps above the IQR upper fence (Q3 + 1.5 * (Q3 - Q1)) as abnormal,
   * and writes the abnormal rows to the dws.dws_app_vio_wsjtj partition for the
   * given date.
   *
   * @param args args(0) is the target partition date (ds), e.g. "2023-06-14"
   */
  def main(args: Array[String]): Unit = {

    // Extract the partition date argument; abort early if it is missing.
    if (args.isEmpty) {
      logger.error("请指定一个分区日期！")
      return
    }
    val ds: String = args.head

    // Build the SparkSession with Hive support so Hive tables can be read/written.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("DwsAppVioWsjtj")
      .enableHiveSupport()
      .getOrCreate()

    // Import implicit conversions ($-column syntax) and the SQL functions package.
    import spark.implicits._
    import org.apache.spark.sql.functions._

    val resultDF: DataFrame = spark
      .table("dwd.dwd_base_vio_surveil")
      .where($"ds" === ds and $"sbbh".isNotNull and $"sbbh" =!= "")
      .select($"sbbh", date_format($"wfsj", "yyyy-MM-dd") as "wfsj_day")
      .distinct()
      // Previous violation day per device (lag 1 ordered by day); falls back to
      // the current day on each partition's first row so its gap evaluates to 0.
      .withColumn("after_day", coalesce(lag($"wfsj_day", 1) over Window.partitionBy($"sbbh").orderBy($"wfsj_day"), $"wfsj_day"))
      // Number of whole days with no data between consecutive violation days.
      .withColumn("sjc", when($"wfsj_day" === $"after_day", lit(0)).otherwise(datediff($"wfsj_day", $"after_day") - 1))
      // Rank of each gap within its device, smallest gap first.
      .withColumn("rn", row_number() over Window.partitionBy($"sbbh").orderBy($"sjc"))
      // Positions of the first (Q1) and third (Q3) quartiles within each device.
      .withColumn("q1_pos", ceil((count("*") over Window.partitionBy($"sbbh")) / 4))
      .withColumn("q3_pos", floor((count("*") over Window.partitionBy($"sbbh")) * 3 / 4))
      // Pick out the gap values at the Q1/Q3 positions: rn is unique per device,
      // so the windowed sum selects exactly the one matching row (or 0 if none).
      .withColumn("q1", sum(when($"rn" === $"q1_pos", $"sjc").otherwise(lit(0))) over Window.partitionBy($"sbbh"))
      .withColumn("q3", sum(when($"rn" === $"q3_pos", $"sjc").otherwise(lit(0))) over Window.partitionBy($"sbbh"))
      // Upper fence of the IQR outlier rule: Q3 + 1.5 * (Q3 - Q1).
      .withColumn("max_val", $"q3" + ($"q3" - $"q1") * 1.5)
      // Keep only abnormal gaps strictly above the fence.
      .where($"sjc" > $"max_val")
      // Relative excess over the fence; division by a zero fence yields null in
      // Spark SQL, which is coalesced to 0.
      .withColumn("more_zb", coalesce(round($"sjc" / $"max_val" - 1, 4), lit(0)))
      // Final output schema.
      .select(
        $"sbbh"
        , $"wfsj_day"
        , $"after_day"
        , $"sjc"
        , $"more_zb"
      )

    // Register the partition so the Hive metastore can see the files written below.
    spark.sql(s"alter table dws.dws_app_vio_wsjtj add if not exists partition(ds='$ds')")
    // Write the data as '^'-separated CSV directly into the partition directory.
    resultDF
      .write
      .mode(SaveMode.Overwrite)
      .format("csv")
      .option("sep", "^")
      .save(s"/daas/motl/dws/dws_app_vio_wsjtj/ds=$ds")

    // Release cluster resources explicitly instead of relying on JVM shutdown.
    spark.stop()
  }
  /**
   * Submit command:
   * spark-submit --master yarn-client --conf spark.sql.shuffle.partitions=2 --class com.shujia.ycsb.DwsAppVioWsjtj car-1.0.jar 2023-06-14
   */
}
