package DianShang_2024.ds_07.indicator

import org.apache.spark.sql.SparkSession

object indicator04 {

  /**
   * Indicator #4: counts orders per province over the latest dwd07 snapshot,
   * prints the ranked list, then pivots the 贵州/青海 counts into columns of
   * a single row.
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support so the dwd07 tables are reachable.
    val session = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第四题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      // Kept from the original setup; enable when Kryo / Hudi are needed.
      // .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
      // .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    session.sql("use dwd07")

    // Registers the rows of `qualified` carrying the newest etl_date as temp
    // view `view`; optionally drops exact-duplicate rows first. The WHERE
    // subquery references the bare table name, matching the original queries.
    def registerLatest(qualified: String, view: String, dedupe: Boolean): Unit = {
      val bare = qualified.split('.').last
      val latest = session
        .table(qualified)
        .where(s"etl_date=(select max(etl_date) from $bare)")
      (if (dedupe) latest.dropDuplicates() else latest).createOrReplaceTempView(view)
    }

    // Fact rows may repeat across loads, hence the dedupe; the dimension is taken as-is.
    registerLatest("dwd07.fact_order_info", "order_info", dedupe = true)
    registerLatest("dwd07.dim_province", "province", dedupe = false)

    // Order volume per province, highest first.
    val ranked =
      """
        |select
        |p.name as province_name,
        |count(*) as  amount
        |from order_info as o
        |join province as p
        |on p.id=o.province_id
        |group by p.name
        |order by amount desc
        |""".stripMargin
    session.sql(ranked).createOrReplaceTempView("datasource")

    session.sql("select * from datasource").show()

    // Pivot the ranked counts so 贵州/青海 become columns of one row.
    val pivoted =
      """
        |select
        |*
        |from(
        |select * from datasource
        |) as r1
        |pivot(
        |max(amount)
        |for province_name in ('贵州' as guizhou,'青海' as qinghai)
        |)
        |
        |""".stripMargin
    session.sql(pivoted).show()

    session.close()
  }

}
