package ds_industry_2025.ds.ds02.T3

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, month, year}
/*
 * Task 4: Using the tables of the dws_ds_hudi database, compute each province's
 * average order amount for April 2020 and compare it with the average order
 * amount of the region that province belongs to ("高" = higher, "低" = lower,
 * "相同" = equal). Store the result into the provinceavgcmpregion table of the
 * shtd_result ClickHouse database. Then, in the Linux ClickHouse CLI, query the
 * top 5 rows ordered descending by province primary key, province average order
 * amount and region average order amount, and paste the SQL statement and the
 * execution screenshot into the answer document (Release\任务B提交结果.docx).
 *
 * NOTE(review): the task text refers to dws_ds_hudi, but the paths below read
 * from dwd_ds_hudi.db — confirm which warehouse layer is intended.
 */
object t4 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t4")
      // Allow fully dynamic Hive partition inserts (no static partition column).
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // Kryo serialization is required by Hudi.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Single warehouse prefix instead of three duplicated path literals.
    val warehouse = "hdfs://192.168.40.110:9000/user/hive/warehouse/dwd_ds_hudi.db"

    // Load each Hudi table exactly once and reuse the DataFrame. The original
    // code called spark.read.format("hudi").load(path) a second time per table
    // to build the filtered view, scanning the same files twice.
    val regionDf = spark.read.format("hudi").load(s"$warehouse/dim_region")
    val provinceDf = spark.read.format("hudi").load(s"$warehouse/dim_province")
    val orderDf = spark.read.format("hudi").load(s"$warehouse/fact_order_info")

    // Raw views are only needed so the scalar subqueries below can find the
    // per-table max(etl_date).
    regionDf.createOrReplaceTempView("region")
    provinceDf.createOrReplaceTempView("province")
    orderDf.createOrReplaceTempView("order")

    // Keep only the latest ETL snapshot of each table.
    regionDf
      .where("etl_date=(select max(etl_date) from region)")
      .createOrReplaceTempView("region_info")

    provinceDf
      .where("etl_date=(select max(etl_date) from province)")
      .createOrReplaceTempView("province_info")

    // Latest snapshot of the fact table, restricted to orders created in
    // April 2020 as required by the task.
    orderDf
      .where("etl_date=(select max(etl_date) from order)")
      .filter(year(col("create_time")) === 2020)
      .filter(month(col("create_time")) === 4)
      .createOrReplaceTempView("order_info")

    // Inner query r1: one row per order with its province and region labels.
    // r2: two window averages over those rows — partitioned by province for the
    // province average, by region for the region average — de-duplicated with
    // DISTINCT so each province appears once. Outer query classifies the
    // province average against the region average.
    val result = spark.sql(
      """
        |select
        |province_id,province_name,p_avg as  provinceavgconsumption,
        |region_id,region_name, r_avg as regionavgconsumption,
        |case
        |when p_avg > r_avg  then '高'
        |when p_avg < r_avg then '低'
        |else '相同'
        |end as comparison
        |from(
        |select distinct
        |province_id,province_name,region_id,region_name,
        |avg(money) over(partition by region_id,region_name,province_id,province_name) as p_avg,
        |avg(money) over(partition by region_id,region_name) as r_avg
        |from(
        |select
        |o.province_id,
        |p.name as province_name,
        |p.region_id,
        |r.region_name,
        |o.final_total_amount as money
        |from order_info as o
        |join province_info as p
        |on p.id=o.province_id
        |join region_info as r
        |on p.region_id=r.id
        |) as  r1
        |) as r2
        |""".stripMargin)

    result.show

    // ClickHouse sink — left disabled, as in the original submission.
//    result.write.format("jdbc")
//      .option("url","jdbc:clickhouse://192.168.40.110:8123/shtd_result")
//      .option("user","default")
//      .option("password","")
//      .option("driver","com.clickhouse.jdbc.ClickHouseDriver")
//      .option("dbtable","provinceavgcmpregion")
//      .mode("append")
//      .save()

    spark.close()
  }

}
