package DianShang_2024.ds_server.indicator

import org.apache.spark.sql.SparkSession

object trait02 {
  def main(args: Array[String]): Unit = {
    /*
          Task 2: From the dwd/dws layer, compute per-city monthly order count and total
          order amount (city/province taken from order_master). Group by province_name,
          year, month; rank cities within each group by total_amount descending to form
          a `sequence` value. Write the result into Hive table
          dws.city_consumption_day_aggr (partitioned by year, month). Verification in
          hive cli: top 5 rows ordered by order count and total amount descending, with
          the amount cast to bigint to avoid scientific notation.
     */

    // Build the SparkSession with Hive support so we can read/write Hive tables.
    val spark = SparkSession.builder()
      .appName("指标计算第二题")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dws")

    // Enable non-strict dynamic partitioning so the insert below can derive
    // the (year, month) partition values from the selected data.
    spark.sql("set hive.exec.dynamic.partition.mode=nonstrict")

    // t1: per (province, city, year, month) — total order amount and order count.
    // create_time is stored as 'yyyyMMdd' (assumed from the parse pattern — confirm
    // against the source table); year()/month() are applied directly to the parsed
    // timestamp instead of round-tripping through a formatted string.
    spark.sql(
      """
        |select
        |city as city_name,
        |province as province_name,
        |cast(sum(order_money) as bigint) as total_amount,
        |count(*) as total_count,
        |year(to_timestamp(create_time,'yyyyMMdd')) as year,
        |month(to_timestamp(create_time,'yyyyMMdd')) as month
        |from dwd_server.fact_order_master
        |group by province_name,city_name,year,month
        |""".stripMargin).createOrReplaceTempView("t1")

    // t2: rank cities within each (province, year, month) by total_amount descending.
    // Alias the rank as `sequence` to match the target table's column name (the
    // original `row_number` alias only lined up with the table positionally).
    spark.sql(
      """
        |select
        |city_name,
        |province_name,
        |total_amount,
        |total_count,
        |row_number() over(partition by province_name,year,month order by total_amount desc) as sequence,
        |year,
        |month
        |from t1
        |""".stripMargin).createOrReplaceTempView("t2")

    // Sanity check: inspect the ranking for one province before writing.
    spark.sql("select * from t2  where province_name='浙江省'  limit 50").show

    // Create the target table per the task's required schema, partitioned by year/month.
    spark.sql(
      """
        |create table if not exists dws.city_consumption_day_aggr(
        |city_name string,
        |province_name string,
        |total_amount double,
        |total_count int,
        |sequence int
        |)
        |partitioned by(year int,month int)
        |""".stripMargin)

    // Load the aggregated result; year/month partition values are resolved
    // dynamically from the last two selected columns.
    spark.sql(
      """
        |insert into table dws.city_consumption_day_aggr
        |partition(year,month)
        |select
        |*
        |from t2
        |""".stripMargin)

    // Release the Spark resources.
    spark.close()
  }

}
