package ods_industry_2024.ods_02.indicator_count_hudi.indicator_02

import org.apache.spark.sql.SparkSession

object test_04 {
  /**
   * Task: for every province, compute the average order amount for April 2020
   * and compare it against the average order amount of the region that
   * province belongs to ("高" = higher / "低" = lower / "相同" = equal).
   * The result is appended to the ClickHouse table `provinceavgcmpregion_02`.
   * (The final top-5 query is executed manually in the ClickHouse CLI.)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("第二套卷子指标第四题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // FIX: the Hudi Spark SQL extension class is HoodieSparkSessionExtension.
      // The previous value "org.apache.spark.sql.hudi.HoodieSparkSession" is not
      // a real class, so the Hudi extension was never actually installed.
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    /**
     * Exposes only the latest ETL snapshot of `table` as a deduplicated temp
     * view named `view`. A staging view over the full table is registered
     * first so the `max(etl_date)` scalar subquery has something to scan.
     *
     * Replaces three copy-pasted setups; FIX: the original `region` setup
     * skipped `.distinct()` unlike its two siblings, risking duplicate rows
     * after the join — all three now deduplicate consistently.
     */
    def latestSnapshotView(table: String, stagingView: String, view: String): Unit = {
      spark.table(table).createOrReplaceTempView(stagingView)
      spark.table(table)
        .where(s"etl_date=(select max(etl_date) from $stagingView)")
        .distinct()
        .createOrReplaceTempView(view)
    }

    latestSnapshotView("dwd_ds_hudi_02.fact_order_info", "temp01", "order_info")
    latestSnapshotView("dwd_ds_hudi_02.dim_province", "temp02", "province")
    latestSnapshotView("dwd_ds_hudi_02.dim_region", "temp03", "region")

    // Window averages per (province) and per (region) on the same joined row
    // set, then a CASE comparison; the outer DISTINCT collapses the fact rows
    // down to one row per province.
    //
    // NOTE(review): to_date(create_time, "yyyyMMdd") assumes create_time is a
    // compact "20200426"-style string. If it is "yyyy-MM-dd HH:mm:ss" (common
    // for this dataset), the parse yields NULL and the WHERE clause filters
    // out every row — confirm the stored format before relying on this.
    val result = spark.sql(
      """
        |select distinct
        |*
        |from(
        |select
        |r1.provinceid,r1.provincename,r1.provinceavgconsumption,r1.regionid,r1.regionname,r1.regionavgconsumption,
        |case
        |when r1.provinceavgconsumption > r1.regionavgconsumption then "高"
        |when r1.provinceavgconsumption < r1.regionavgconsumption then "低"
        |else "相同"
        |end
        |as comparison
        |from(
        |select
        |o.province_id as provinceid,
        |p.name as provincename,
        |round(avg(o.final_total_amount) over(partition by o.province_id,p.name),2) as provinceavgconsumption,
        |r.id as regionid,
        |r.region_name as regionname,
        |round(avg(o.final_total_amount) over(partition by r.id,r.region_name),2) as regionavgconsumption
        |from order_info as o
        |join province as p
        |on p.id=o.province_id
        |join region as r
        |on r.id=p.region_id
        |where Year(to_date(o.create_time,"yyyyMMdd"))=2020 and Month(to_date(o.create_time,"yyyyMMdd"))=4
        |) as r1
        |) as r2
        |""".stripMargin)

    // Append the comparison rows to ClickHouse over JDBC.
    result.write.mode("append")
      .format("jdbc")
      .option("url", "jdbc:clickhouse://192.168.40.110:8123/hudi_indicator")
      .option("user", "default")
      .option("password", "")
      .option("dbtable", "provinceavgcmpregion_02")
      .option("driver", "com.clickhouse.jdbc.ClickHouseDriver")
      .save()

    println("完成")

    spark.close()
  }
}
