package ds_industry_2025.ds.Formal_volume2.T3

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

/*
    4、请根据dws_ds_hudi库中的表计算出每个省份2020年4月的平均订单金额和该省所在地区平均订单金额相比较结果（“高/低/相同”）,
    存入ClickHouse数据库shtd_result的provinceavgcmpregion表中（表结构如下），然后在Linux的ClickHouse命令行中根据省份表
    主键、省平均订单金额、地区平均订单金额均为降序排序，查询出前5条，将SQL语句复制粘贴至客户端桌面【Release\任务B提交结果.docx】
    中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下；
 */
object t4 {
  /**
   * Task 4: for April 2020, compute each province's average order amount and
   * compare it against the average order amount of the region the province
   * belongs to ("高"/"低"/"相同" = higher/lower/equal). The result is appended
   * to ClickHouse table `shtd_result.provinceavgcmpregion`.
   *
   * Reads the Hudi table `dws_ds_hudi.province_consumption_day_aggr` directly
   * from HDFS, aggregates with Spark SQL, and writes via the ClickHouse JDBC
   * driver.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t4")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // Kryo serializer + Hudi session extension are required to read Hudi tables.
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Physical HDFS location of the Hudi-managed aggregate table.
    val data_path = "hdfs://192.168.40.110:9000/user/hive/warehouse/dws_ds_hudi.db/province_consumption_day_aggr"

    // Restrict to April 2020 before registering the temp view, so the
    // window aggregation below only sees the relevant month.
    spark.read.format("hudi").load(data_path)
      .where(col("year") === 2020 and col("month") === 4)
      .createOrReplaceTempView("data")

    // Inner query (r1): one distinct row per province carrying its own average
    // (total_amount / total_count) plus region-level sums via window functions.
    // Middle query (r2): derive the region average from the region sums.
    // Outer query: classify each province against its region average.
    // NOTE(fix): region_id/region_name are now aliased to regionid/regionname,
    // matching the underscore-free column names of the target table
    // `provinceavgcmpregion` (consistent with provinceavgconsumption /
    // regionavgconsumption); JDBC append maps columns by name.
    val result = spark.sql(
      """
        |select
        |provinceid,provincename,provinceavgconsumption,
        |regionid,regionname,region_avg as regionavgconsumption,
        |case
        |when provinceavgconsumption > region_avg then "高"
        |when provinceavgconsumption < region_avg then "低"
        |else "相同"
        |end as comparison
        |from(
        |select
        |provinceid,provincename,
        |province_avg as provinceavgconsumption,
        |regionid,regionname,
        |round(region_amount / region_count) as region_avg
        |from(
        |select distinct
        |province_id as provinceid,
        |province_name as provincename,
        |region_id as regionid,
        |region_name as regionname,
        |round(total_amount / total_count) as province_avg,
        |sum(total_amount) over(partition by region_id,region_name) as region_amount,
        |sum(total_count) over(partition by region_id,region_name) as region_count
        |from data
        |) as r1
        |) as r2
        |""".stripMargin)

    // Append into the pre-created ClickHouse table; columns are matched by name.
    result.write.format("jdbc")
      .option("url", "jdbc:clickhouse://192.168.40.110:8123/shtd_result")
      .option("user", "default")
      .option("password", "")
      .option("driver", "com.clickhouse.jdbc.ClickHouseDriver")
      .option("dbtable", "provinceavgcmpregion")
      .mode("append")
      .save()

    // Verification query to run in the ClickHouse CLI (task deliverable):
    //   select * from provinceavgcmpregion order by provinceid desc ,provinceavgconsumption desc ,regionavgconsumption desc limit 5;

    spark.close()

  }

}
