package ds_industry_2025.ds.YangJuan_2024.T3

import org.apache.spark.sql.SparkSession

import java.util.Properties

/*
    根据 dwd 层表统计每个省份、每个地区、每个月下单的数量和下单的总金额，存入
MySQL 数据库 shtd_result 的 provinceeverymonth 表中（表结构如下），然后在 Linux 的
MySQL 命令行中根据订单总数、订单总金额、省份表主键均为降序排序，查询出前 5
条，将 SQL 语句复制粘贴至客户端桌面【Release\任务 B 提交结果.docx】中对应的任
务序号下，将执行结果截图粘贴至客户端桌面【Release\任务 B 提交结果.docx】中对应
的任务序号下；
 */
object t2 {
  /**
   * Batch job: aggregate order count and total order amount per province /
   * region / month from the dwd layer, then write the result to the MySQL
   * table `shtd_result.provinceeverymonth` (overwrite).
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support. Dynamic-partition mode is
    // relaxed so partitioned inserts work; Kryo speeds up serialization.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t2")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // FIX: package was misspelled "hdui" — the correct Hudi extension class
      // is org.apache.spark.sql.hudi.HoodieSparkSessionExtension; the typo
      // would make the session fail to load the extension at startup.
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // Expose only the latest etl_date snapshot of each dwd table as a temp
    // view. NOTE(review): a scalar subquery inside a `where` expression
    // string requires a Spark version that supports subquery expressions in
    // filter conditions — confirm against the cluster's Spark version.
    spark.table("dwd.dim_region")
      .where("etl_date=(select max(etl_date) from dwd.dim_region)")
      .createOrReplaceTempView("region")

    spark.table("dwd.dim_province")
      .where("etl_date=(select max(etl_date) from dwd.dim_province)")
      .createOrReplaceTempView("province")

    spark.table("dwd.fact_order_info")
      .where("etl_date=(select max(etl_date) from dwd.fact_order_info)")
      .createOrReplaceTempView("order_info")

    // Per (year, month, region, province) totals. The window functions
    // compute sum/count over each group while keeping the descriptive
    // columns; DISTINCT then collapses the duplicated rows to one per group.
    val result = spark.sql(
      """
        |select distinct
        |o.province_id  as provinceid,
        |p.name as provincename,
        |p.region_id as regionid,
        |r.region_name as regionname,
        |sum(o.final_total_amount)
        |over(partition by year(o.create_time),month(o.create_time),p.region_id,r.region_name,o.province_id,p.name) as totalconsumption,
        |count(*)
        |over(partition by year(o.create_time),month(o.create_time),p.region_id,r.region_name,o.province_id,p.name) as totalorder,
        |year(o.create_time) as year,
        |month(o.create_time) as month
        |from order_info as o
        |join province as p
        |on p.id=o.province_id
        |join region as r
        |on r.id=p.region_id
        |""".stripMargin)

    // JDBC connection properties for the MySQL sink.
    // NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x class;
    // Connector/J 8.x renames it to com.mysql.cj.jdbc.Driver — confirm which
    // connector jar is on the classpath before changing it.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Overwrite the target table with this run's aggregation.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "provinceeverymonth", conn)

    // Verification query to run in the MySQL CLI for the task deliverable:
    //   select * from provinceeverymonth order by totalorder desc,totalconsumption desc ,provinceid desc limit 5;

    result.show

    spark.close()
  }
}
