package DianShang_2024.ds_03.indicator

import org.apache.spark.sql.SparkSession

import java.util.Properties

object trait03 {
  /*
      Task 3: based on the dws-layer table province_consumption_day_aggr,
      compute each province's average order amount for April 2020 and compare
      it with the average order amount of the region the province belongs to
      ("高" = higher / "低" = lower / "相同" = equal). Store the result in the
      provinceavgcmpregion table of the MySQL shtd_result database, then (in
      the MySQL CLI) query the top 5 rows ordered by province primary key,
      province average and region average, all descending, and paste SQL +
      screenshot into the submission document.
   */
  def main(args: Array[String]): Unit = {
    // Build a Hive-enabled Spark session so the dws-layer tables can be queried.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第三题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // JDBC connection properties for the MySQL sink.
      // NOTE(review): hard-coded credentials are tolerable only in this exam
      // environment — externalize them (config/env vars) for production use.
      val mysqlProps = new Properties()
      mysqlProps.setProperty("user", "root")
      mysqlProps.setProperty("password", "123456")
      // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
      // class name; Connector/J 8+ expects "com.mysql.cj.jdbc.Driver".
      mysqlProps.setProperty("driver", "com.mysql.jdbc.Driver")

      // Step 1: restrict the source table to April 2020, de-duplicating rows first.
      spark.sql(
        """
          |select
          |*
          |from(
          |select distinct * from dws03.province_consumption_day_aggr
          |where year='2020' and month='4'
          |) as t1
          |""".stripMargin).createOrReplaceTempView("temp")

      // Step 2: per-province average order amount (ceil = round up),
      // keeping region_id so each province can be joined to its region below.
      spark.sql(
        """
          |select
          |province_id,
          |province_name,
          |ceil((sum(total_amount ) / sum(total_count))) as provinceavgconsumption,
          |region_id
          |from temp
          |group by province_id,province_name,region_id
          |""".stripMargin).createOrReplaceTempView("t1")

      // Step 3: per-region average order amount; region_id is aliased to "id"
      // to avoid column-name ambiguity in the join below.
      spark.sql(
        """
          |select
          |region_id as id,
          |region_name,
          |ceil(sum(total_amount)  /  sum(total_count)  ) as regionavgconsumption
          |from temp
          |group by region_id,region_name
          |""".stripMargin).createOrReplaceTempView("t2")

      // Step 4: join the province and region aggregates and classify each
      // province as 高/低/相同 relative to its region's average.
      val result = spark.sql(
        """
          |select
          |province_id,
          |province_name,
          |provinceavgconsumption,
          |region_id,
          |region_name,
          |regionavgconsumption,
          |if(provinceavgconsumption > regionavgconsumption,"高",if(provinceavgconsumption < regionavgconsumption,"低","相同")) as comparison
          |from t1
          |join t2 on t1.region_id=t2.id
          |""".stripMargin)

      // Step 5: overwrite the MySQL target table provinceavgcmpregion.
      result.write.mode("overwrite")
        .jdbc("jdbc:mysql://192.168.40.110:3306/ds03?useSSL=false", "provinceavgcmpregion", mysqlProps)

      println("运行成功")
    } finally {
      // Always release the Spark session — previously close() was skipped
      // whenever a query or the JDBC write threw, leaking the session.
      spark.close()
    }
  }

}
