package ds_industry_2025.ds.ds01.T3

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit, month, to_date, year}

import java.util.Properties

/*
    3、请根据dwd层表计算出2020年4月每个省份的平均订单金额和所有省份平均订单金额相比较结果（“高/低/相同”）,存入MySQL数据
    库shtd_result的provinceavgcmp表（表结构如下）中，然后在Linux的MySQL命令行中根据省份表主键、该省平均订单金额均为降序排
    序，查询出前5条，将SQL语句复制粘贴至客户端桌面【Release\任务B提交结果.docx】中对应的任务序号下，将执行结果截图粘贴至客户端
    桌面【Release\任务B提交结果.docx】中对应的任务序号下;
 */
object t3 {
  /**
   * Task 3: from the dwd-layer tables, compute each province's average order
   * amount for April 2020, compare it with the overall average across all
   * provinces ('高' = higher / '低' = lower / '相同' = equal), and write the
   * result to the MySQL table shtd_result.provinceavgcmp.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("t3")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for the MySQL sink.
    val conn = new Properties()
    conn.setProperty("user", "root")
    conn.setProperty("password", "123456")
    conn.setProperty("driver", "com.mysql.jdbc.Driver")

    // Latest partition of the province dimension (id -> name).
    spark.table("dwd.dim_province")
      .where("etl_date=(select max(etl_date) from dwd.dim_province)")
      .select("id", "name")
      .createOrReplaceTempView("province")

    // Latest partition of the order fact table, restricted to April 2020.
    // The view is named `order_info` (not `order`) because ORDER is a reserved
    // SQL keyword and only parses as an identifier in lenient non-ANSI mode.
    spark.table("dwd.fact_order_info")
      .where("etl_date=(select max(etl_date) from dwd.fact_order_info)")
      .select("final_total_amount", "create_time", "province_id")
      .filter(year(col("create_time")) === 2020)
      .filter(month(col("create_time")) === 4)
      .createOrReplaceTempView("order_info")

    // Step 1: overall average order amount across all provinces, collected to
    // the driver as a string so it can be interpolated into the next query.
    val money_avg = spark.sql(
      """
        |select
        |(r1.money_sum / r1.order_count) as avg_money
        |from(
        |select
        |count(*) as order_count,
        |sum(final_total_amount) as money_sum
        |from order_info
        |) as r1
        |""".stripMargin).collect()(0).get(0).toString

    // s-interpolation: println(a, b) auto-tuples and would print "(label,value)".
    println(s"所有省份的平均订单金额为: $money_avg")

    // Step 2: per-province average (one distinct row per province via window
    // aggregates) compared against the overall average. The overall average is
    // cast to double everywhere it appears so the stored column and the CASE
    // comparisons share one consistent type.
    val result = spark.sql(
      s"""
         |select
         |r2.provinceid,r2.provincename,
         |r2.province_avg as provinceavgconsumption,
         |cast('$money_avg' as double) as allprovinceavgconsumption,
         |case
         |when r2.province_avg > cast('$money_avg' as double) then '高'
         |when r2.province_avg < cast('$money_avg' as double) then '低'
         |else '相同'
         |end as comparison
         |from(
         |select
         |r1.provinceid,r1.provincename,
         |(r1.province_sum / r1.province_count) as province_avg
         |from(
         |select distinct
         |o.province_id as provinceid,
         |p.name as provincename,
         |count(*) over(partition by o.province_id,p.name) as province_count,
         |sum(final_total_amount) over(partition by o.province_id,p.name) as province_sum
         |from order_info as o
         |join province as p
         |on p.id=o.province_id
         |) as r1
         |) as r2
         |""".stripMargin)

    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/shtd_result?useSSL=false", "provinceavgcmp", conn)

    spark.stop()

    // Verification query (run in the MySQL CLI):
    // select * from shtd_result.provinceavgcmp order by provinceid desc,provinceavgconsumption desc limit 5;
  }

}
