package ds_industry_2025.ds.YangJuan_2024.T3

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._

import java.util.Properties
/*
    请根据 dwd 层表计算出 2020 年 4 月每个省份的平均订单金额和所有省份平均订单金额
相比较结果（“高/低/相同”），存入 MySQL 数据库 shtd_result 的 provinceavgcmp 表（表
结构如下）中，然后在 Linux 的 MySQL 命令行中根据省份表主键、该省平均订单金额
均为降序排序，查询出前 5 条，将 SQL 语句复制粘贴至客户端桌面【Release\任务 B
提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\
任务 B 提交结果.docx】中对应的任务序号下;
 */
object t3 {
  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support so the dwd.* warehouse tables resolve.
    // Hudi extension + Kryo serializer match the rest of this project's jobs.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t3")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Restrict the order fact table to orders created in April 2020,
    // as required by the task statement above.
    spark.table("dwd.fact_order_info")
      .where(year(col("create_time")) === 2020 && month(col("create_time")) === 4)
      .createOrReplaceTempView("order_info")

    spark.table("dwd.dim_province")
      .createOrReplaceTempView("province")

    // Per-province average order amount vs. the overall average, labelled
    // 高 (higher) / 低 (lower) / 相同 (equal). The window functions attach both
    // averages to every order row; `select distinct` then collapses the result
    // to one row per (province_id, name).
    // NOTE: alias fixed from the misspelled "comparsion" — with overwrite mode
    // the JDBC writer derives the MySQL column names from this schema.
    val result = spark.sql(
      """
        |select
        |provinceid,provincename,
        |provinceconsumption,allprovinceconsumption,
        |case
        |when provinceconsumption > allprovinceconsumption then "高"
        |when provinceconsumption < allprovinceconsumption then "低"
        |else "相同"
        |end as comparison
        |from(
        |select distinct
        |o.province_id as provinceid,
        |p.name as provincename,
        |avg(o.final_total_amount) over(partition by o.province_id,p.name) as provinceconsumption,
        |avg(o.final_total_amount) over() as allprovinceconsumption
        |from order_info as o
        |join province as p
        |on p.id=o.province_id
        |) as r1
        |""".stripMargin)

    // Two actions follow (jdbc write + show); cache so the join/window job
    // is computed once instead of being re-run for the preview.
    result.cache()

    // JDBC connection properties for the shtd_result target database.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Overwrite drops and recreates provinceavgcmp from the DataFrame schema.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "provinceavgcmp", conn)

    // Console preview for verification; served from the cached result.
    result.show()

    // Verification query to run in the MySQL CLI (task deliverable):
    //  select * from provinceavgcmp order by provinceid desc,provinceconsumption desc limit 5;

    spark.close()
  }

}
