package com.dyj.ads

import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.{DataFrame, SparkSession}

import javax.print.DocFlavor.STRING

object ads_e_mz_au5800_gysz {

  /**
   * Daily triglyceride (甘油三酯, gysz) statistics job.
   *
   * Reads the biochemistry rows for one `ds` partition from
   * `bigdata03_dws.dws_e_mz_au5800shenghua`, derives partition-level
   * variance (`gysz_fc`), standard deviation (`gysz_bzc`) and
   * abnormal-value count (`gysz_ycgs`), and writes the result as CSV to
   * the corresponding ads-layer partition directory.
   *
   * args(0): the `ds` partition key to process.
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a clear message instead of ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "usage: ads_e_mz_au5800_gysz <ds>")
    val ds: String = args(0)

    val sparkSession: SparkSession = SparkSession.builder()
      .appName("甘油三酯指标统计")
      .enableHiveSupport()
      // Tiny per-day dataset: a single shuffle partition avoids small files.
      .config("spark.sql.shuffle.partitions", 1)
      .getOrCreate()

    import sparkSession.implicits._
    import org.apache.spark.sql.functions._

    sparkSession.sql("use bigdata03_dws")

    val gyszDF: DataFrame = sparkSession.sql(
      s"""
         |select
         |baoGaoBianHao,
         |ganYouSanZhi,
         |baoGaoRiQi,
         |gysz_max,
         |gysz_min,
         |gysz_avg
         |from
         |bigdata03_dws.dws_e_mz_au5800shenghua
         |where
         |ds='${ds}'
         |""".stripMargin)

    // All derived statistics are over the WHOLE ds partition, so every window
    // below is the unbounded one (`.over()` with no spec). The previous code
    // partitioned `number` by baoGaoBianHao, which divided a GLOBAL sum of
    // squared deviations by a PER-REPORT row count — fixed to the global count
    // so gysz_fc is a true variance.
    gyszDF
      // total number of rows in this ds partition
      .withColumn("number", count($"baoGaoBianHao").over())
      // squared deviation of each measurement from the mean
      // (gysz_avg is presumably the partition-level mean from dws — confirm)
      .withColumn("e_fc", ($"ganYouSanZhi" - $"gysz_avg") * ($"ganYouSanZhi" - $"gysz_avg"))
      // sum of squared deviations over the whole partition
      .withColumn("a_fc", sum($"e_fc").over())
      // population variance and standard deviation
      .withColumn("gysz_fc", $"a_fc" / $"number")
      .withColumn("gysz_bzc", sqrt($"gysz_fc"))
      // abnormal count: measurements above the 2.26 reference limit.
      // .otherwise(0) keeps the column 0 rather than null when no row matches.
      .withColumn("gysz_ycgs", sum(when($"ganYouSanZhi" > 2.26, 1).otherwise(0)).over())
      .select($"baoGaoBianHao", $"ganYouSanZhi", $"baoGaoRiQi", $"gysz_max", $"gysz_min",
        $"gysz_avg", $"gysz_fc", $"gysz_bzc", $"gysz_ycgs")
      .write
      // overwrite makes the job safely re-runnable for the same ds
      .mode("overwrite")
      .format("csv")
      .save(s"/daas/motl/bigdata03/ads/ads_e_mz_au5800_gysz/ds=$ds")
  }

}
