package ds_industry_2025.ds.YangJuan_2024.T3

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._

import java.util.Properties

/*
      20、根据dwd层或dws层的相关表，请计算2022年4月26日凌晨0点0分0秒到早上9点59分59秒为止的数据，以5个小时为时间窗口，滑动的步长
      为1小时，做滑动窗口计算该窗口内订单总金额和订单总量，时间不满5小时不触发计算（即从凌晨5点0分0秒开始触发计算），存
      入ClickHouse数据库shtd_result的slidewindowconsumption表中，然后在Linux的ClickHouse命令行中根据订单时间段升序排序，查
      询出前5条，将核心业务代码中的开窗相关代码与MySQL查询结果展示出来。
假如数据为：
用户	订单时间	订单金额
张三1号	2020-04-26 00:00:10	10
李四1号	2020-04-26 00:20:10	25
李四2号	2020-04-26 01:21:10	10
李四2号	2020-04-26 02:21:10	5
王五1号	2020-04-26 03:20:10	20
李四2号	2020-04-26 04:20:10	10
王五2号	2020-04-26 05:10:10	10
李四2号	2020-04-26 06:20:10	10
赵六2号	2020-04-26 07:10:10	10
赵六2号	2020-04-26 08:10:10	10
王五2号	2020-04-26 09:11:10	10
王五4号	2020-04-26 09:32:10	30
计算结果则为：
订单时间段	该窗口内订单金额	订单总量	平均每单价格
2020-04-26 04	80	6	13.33
2020-04-26 05	55	5	11
2020-04-26 06	55	5	11
2020-04-26 07	60	5	12
2020-04-26 08	50	5	10
2020-04-26 09	80	6	13.33


slidewindowconsumption表结构如下：
字段	类型	中文含义	备注
consumptiontime	varchar	订单时间段
consumptionsum	double	该窗口内的订单总金额
consumptioncount	double	订单总数量
consumptionavg	double	平均每单价格	上面两个字段相除，四舍五入保留两位小数
 */
object t20 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t20")
      .config("hive.exec.dynamic.partition.mode","nonstrict")
      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    val conn=new Properties()
    conn.setProperty("user","root")
    conn.setProperty("password","123456")
    conn.setProperty("driver","com.mysql.jdbc.Driver")


    val data = spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false", "order_info", conn)
      .select("create_time", "final_total_amount")
      .filter(col("create_time").between("2020-04-26 00:00:00", "2020-04-26 09:59:59"))

    data.createOrReplaceTempView("data")

//    spark.sql("select create_time from data").show

    //  todo range between :根据值来选择区间，如果是根据行来选择的话就是row between
    //   interval 5 hours preceding: interval :时间间隔    意思是 往前间隔5个小时
    //   and current row:到当前行
    //  连起来就是根据值来分区，往前5个小时到当前行
//    spark.sql(
//      """
//        |with
//        |window_data as (
//        |select distinct
//        |create_time,
//        |final_total_amount,
//        |sum(final_total_amount)
//        |over(order by cast(create_time as timestamp) range between interval 5 hours and current row) as time_5_total_amount,
//        |count(*)
//        |over(order by cast(create_time as timestamp) range between interval 5 hours and current row) as time_5_count
//        |from data
//        |),
//        |window2 as (
//        |select
//        |create_time,
//        |final_total_amount,
//        |time_5_amount / time_5_count
//        |from window_data
//        |)
//        |""".stripMargin)


    //  todo
    val result = spark.sql(
      """
        |with
        |temp01 as (
        |select distinct
        |time,
        |sum(final_total_amount) over(partition by time) as hours_money,
        |count(*) over(partition by time) as hours_count
        |from(
        |select
        |date_format(create_time,"yyyy-MM-dd HH") as time,
        |final_total_amount
        |from data
        |where create_time >= "2020-04-26 00:00:00" and create_time < "2020-04-26 10:00:00"
        |) as r1
        |)
        |
        |select
        |time,
        |money,count,
        |round((money / count),2) as avg
        |from(
        |select distinct
        |time,
        |sum(hours_money) over(order by time rows between 4  preceding and current row) as money,
        |count(hours_count) over(order by time rows between 4  preceding and current row) as count
        |from temp01
        |) as r1
        |where hour(time) > 5
        |""".stripMargin)


    result.show


//    data.withColumn(sum,dense_rank().over(
//      Window.rowsBetween(-4,Window.currentRow)
//    )



    spark.close()
  }

}
