package DianShang_2024.ds_server.indicator

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_timestamp}

object trait09 {
  /**
   * Task 9: from the dwd/dws layer tables, compute — for 2022-04-26 between
   * 00:00:00 and 09:59:59 — each hour's new order amount together with the
   * running (cumulative) total of the day's order amount, and append the
   * result to table `accumulateconsumption` in ClickHouse database
   * `shtd_result`. (The final top-5 check is done in the ClickHouse CLI,
   * ordered ascending by the hour bucket.)
   */
  def main(args: Array[String]): Unit = {
    //  Spark session with Hive support so the ODS tables are readable.
    val spark=SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第九题")
      .enableHiveSupport()
      .getOrCreate()

    /*
        The earlier cleansing pass truncated fact_order_master.create_time to a
        date (no time-of-day), so reload the raw ODS data here and rebuild the
        full timestamp. A compact string such as 20220315095959 is first parsed
        with to_timestamp(col, 'yyyyMMddHHmmss') and then rendered back with
        date_format() as 'yyyy-MM-dd HH:mm:ss'.
     */
    spark.sql(
        """
          |select
          |order_id ,
          |order_sn ,
          |customer_id ,
          |shipping_user ,
          |province ,
          |city ,
          |address ,
          |order_source ,
          |payment_method ,
          |order_money ,
          |district_money ,
          |shipping_money ,
          |payment_money ,
          |shipping_comp_name ,
          |shipping_sn ,
          |date_format(to_timestamp(create_time,'yyyyMMddHHmmss'),'yyyy-MM-dd HH:mm:ss') as create_time ,
          |shipping_time ,
          |pay_time ,
          |receive_time ,
          |order_status ,
          |order_point ,
          |invoice_title ,
          |modified_time,
          |etl_date
          |from ods_server.order_master01
          |where etl_date='20240311'  and length(city) <= 8
          |""".stripMargin)
      .withColumn("dwd_insert_user", lit("user1"))
      // format-then-reparse truncates current_timestamp() to whole seconds
      .withColumn(
        "dwd_insert_time",
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss").cast("timestamp")
      )
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn(
        "dwd_modify_time",
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss").cast("timestamp")
      )
      .createOrReplaceTempView("temp_table")

    spark.sql("select create_time from temp_table limit 20").show

    //  Restrict to orders created on 2022-04-26 from 00:00:00 (INCLUSIVE — the
    //  task window starts at midnight, hence >=) up to but excluding 10:00:00,
    //  i.e. through 09:59:59.
    spark.sql(
      """
        |select
        |*
        |from temp_table
        |where create_time >= '2022-04-26 00:00:00' and create_time < '2022-04-26 10:00:00'
        |""".stripMargin).createOrReplaceTempView("temp01")

    spark.sql("select * from temp01 limit 20").show

    /*
        Per-hour order amount plus a running total:
        - order_hour is the first 13 chars of the timestamp ('yyyy-MM-dd HH'),
          which is zero-padded, so plain lexicographic ordering is the correct
          chronological ordering (and works past hour 9, unlike ordering on the
          last digit only).
        - The ORDER BY lives INSIDE the window spec: a running sum whose order
          is supplied by a subquery's ORDER BY is not guaranteed by Spark.
     */
    val result_data=spark.sql(
      """
        |select
        |order_hour,
        |hour_money,
        |sum(hour_money) over(order by order_hour rows between unbounded preceding and current row) as leijia_number
        |from(
        |select
        |substr(cast(create_time as string),1,13) as order_hour,
        |sum(order_money) as hour_money
        |from temp01
        |group by substr(cast(create_time as string),1,13)
        |) as t2
        |""".stripMargin)
    result_data.createOrReplaceTempView("result_table")

    spark.sql("select * from result_table limit 20").show

    //  Append the result to ClickHouse over JDBC; column names must match the
    //  shtd_result.accumulateconsumption schema.
    result_data.write
      .format("jdbc")
      .option("url","jdbc:clickhouse://192.168.40.110:8123/shtd_result")
      .option("user","default")
      .option("password","")
      .option("dbtable","accumulateconsumption")
      .option("driver","com.clickhouse.jdbc.ClickHouseDriver")
      .mode("append")
      .save()

    //  Tear down the Spark session.
    spark.close()
  }

}
