package ds_industry_2025.ds.ds01.T3

import org.apache.spark.sql.SparkSession

import java.util.Properties

/*
    2、根据dwd层表统计每个省份、每个地区、每个月下单的数量和下单的总金额，存入MySQL数据库shtd_result的provinceeverymonth表
    中（表结构如下），然后在Linux的MySQL命令行中根据订单总数、订单总金额、省份表主键均为降序排序，查询出前5条，将SQL语句复制粘贴
    至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端桌面【Release\任务B提交结果.docx】
    中对应的任务序号下;
 */
object t2 {
  def main(args: Array[String]): Unit = {
    // Spark session with Hive support; Kryo serializer + Hudi extension match the project's other jobs.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t2")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for the result database.
    // NOTE(review): credentials are hard-coded — acceptable for the exam env, move to config otherwise.
    // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class name; with
    // Connector/J 8.x it should be "com.mysql.cj.jdbc.Driver" — confirm which jar is on the classpath.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Project only the columns the analysis needs, keeping just the latest etl_date
    // partition of each dwd table (reduces memory footprint and speeds up the joins).
    spark.table("dwd.dim_region")
      .where("etl_date=(select max(etl_date) from dwd.dim_region)")
      .select("id", "region_name")
      .createOrReplaceTempView("region")

    spark.table("dwd.dim_province")
      .where("etl_date=(select max(etl_date) from dwd.dim_province)")
      .select("id", "name", "region_id")
      .createOrReplaceTempView("province")

    // View renamed from "order" to "orders": ORDER is an SQL keyword and the bare name
    // breaks once spark.sql.ansi.enabled is turned on. The name is internal to this job.
    spark.table("dwd.fact_order_info")
      .where("etl_date=(select max(etl_date) from dwd.fact_order_info)")
      .select("final_total_amount", "create_time", "province_id")
      .createOrReplaceTempView("orders")

    // Order count and total amount per province / region / month.
    // GROUP BY replaces the original DISTINCT + window-function formulation: the window
    // partition keys were exactly these group keys, so both produce one row per group —
    // GROUP BY does it directly, without the extra shuffle that DISTINCT required.
    val result = spark.sql(
      """
        |select
        |  p.id                      as provinceid,
        |  p.name                    as provincename,
        |  r.id                      as regionid,
        |  r.region_name             as regionname,
        |  sum(o.final_total_amount) as totalconsumption,
        |  count(*)                  as totalorder,
        |  year(o.create_time)       as year,
        |  month(o.create_time)      as month
        |from orders as o
        |join province as p on p.id = o.province_id
        |join region   as r on r.id = p.region_id
        |group by p.id, p.name, r.id, r.region_name,
        |         year(o.create_time), month(o.create_time)
        |""".stripMargin)

    // Overwrite the target table on each run so the job is idempotent.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "provinceeverymonth", conn)

    println("写入完成")

    //  select * from provinceeverymonth order by totalorder desc ,totalconsumption desc,provinceid desc limit 5;

    spark.close()
  }
}
