package DianShang_2024.ds_server.indicator

import org.apache.spark.sql.SparkSession

object trait05 {

  /**
   * Task 5: from the dwd/dws layer, compute for each province the top-3 cities
   * by 2022 order amount and append the result to the ClickHouse table
   * `shtd_result.regiontopthree`. (Verification is then done in the ClickHouse
   * CLI: order by province ascending, show the first 5 rows.)
   */
  def main(args: Array[String]): Unit = {
    // Build the SparkSession with Hive support so the dwd_server tables are visible.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第五题")
      .enableHiveSupport()
      .getOrCreate()

    /*
     * Processing logic:
     *   1. Sum the order amount per (province, city) for the year 2022.
     *   2. Rank cities within each province by that sum, descending.
     *   3. Keep only the top-3 cities per province.
     */

    // first_table: for every province, the top-3 cities with their order totals.
    // NOTE: the rank column is named `rn` (not `row_number`) so it does not
    // shadow the row_number() window function.
    spark.sql(
      """
        |select *
        |from (
        |  select
        |    province_name,
        |    city_name,
        |    CityOrderMoney_sum,
        |    row_number() over (partition by province_name
        |                       order by CityOrderMoney_sum desc) as rn
        |  from (
        |    select
        |      province as province_name,
        |      city     as city_name,
        |      sum(order_money) as CityOrderMoney_sum
        |    from dwd_server.fact_order_master
        |    where year(date_format(to_timestamp(create_time, 'yyyyMMdd'), 'yyyy-MM-dd')) = 2022
        |    group by province_name, city_name
        |  ) as t1
        |) as t2
        |where rn <= 3
        |""".stripMargin).createOrReplaceTempView("first_table")

    // Inspect a sample of the ranked data.
    spark.sql("select * from first_table  limit 20").show

    // two_table: collapse each province's top-3 city names / amounts into
    // comma-separated strings.
    //   concat_ws(sep, arr) : joins an array of strings with a separator
    //   collect_list(col)   : aggregates a column's values into an array
    // BUGFIX: concat_ws requires string or array<string> arguments; the numeric
    // sum must be cast to string inside collect_list, otherwise Spark fails the
    // query with an AnalysisException (data type mismatch).
    val result_data = spark.sql(
      """
        |select
        |  province_name,
        |  concat_ws(',', collect_list(city_name)) as city_names,
        |  concat_ws(',', collect_list(cast(CityOrderMoney_sum as string))) as Moneys
        |from first_table
        |group by province_name
        |""".stripMargin)
    result_data.createOrReplaceTempView("two_table")

    spark.sql("select * from two_table  ").show

    // Append the aggregated result into ClickHouse (shtd_result.regiontopthree).
    result_data.write
      .format("jdbc")
      .option("url", "jdbc:clickhouse://192.168.40.110:8123/shtd_result")
      .option("user", "default")
      .option("password", "")
      .option("driver", "com.clickhouse.jdbc.ClickHouseDriver")
      .option("dbtable", "regiontopthree")
      .mode("append")
      .save() // triggers the actual write

    // Shut down the SparkSession.
    spark.close()
  }

}
