package DianShang_2024.ds_03.indicator

import org.apache.spark.sql.SparkSession

import java.util.Properties

object trait04 {
  def main(args: Array[String]): Unit = {
    /*
     * Task 4: from the dws-layer table, compute for each region the top 3
     * provinces by 2020 order amount and write the result into the
     * `regiontopthree` table of the MySQL `shtd_result`/`ds03` database.
     * (The exam brief then asks for a manual top-5 query from the MySQL CLI.)
     */

    // Build the Spark session; Hive support is required to read the dws03 table.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("指标计算第四题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    // JDBC connection properties for MySQL.
    // NOTE(review): credentials and host are hard-coded; acceptable in the exam
    // environment, but these should come from configuration in production.
    val mysqlProps = new Properties()
    mysqlProps.setProperty("user", "root")
    mysqlProps.setProperty("password", "123456")
    mysqlProps.setProperty("driver", "com.mysql.jdbc.Driver")

    // Load the 2020 rows once, de-duplicated, into a temp view.
    spark.sql(
      """
        |select distinct *
        |from dws03.province_consumption_day_aggr
        |where year = '2020'
        |""".stripMargin).createOrReplaceTempView("temp")

    // Rank provinces within each region and concatenate the top 3 in rank order.
    //
    // FIX 1: the original ranked with `order by total_amount desc` on the raw
    //        column; if total_amount is stored as a string, that sorts
    //        lexicographically (e.g. "9" > "10"). Cast before ordering.
    // FIX 2: the original `cast(total_amount as int)` had no alias, so the
    //        resulting column name is not guaranteed to be `total_amount`;
    //        alias it explicitly so the outer query binds correctly.
    // FIX 3: `collect_list` gives no ordering guarantee after the group-by
    //        shuffle; collect (rank, value) structs and `sort_array` by rank so
    //        the concatenated strings really run from 1st to 3rd place.
    //        (`transform` / `sort_array` on structs require Spark 2.4+ —
    //        NOTE(review): confirm the cluster's Spark version.)
    val result = spark.sql(
      """
        |select
        |  region_id,
        |  region_name,
        |  concat_ws(",", transform(sort_array(collect_list(struct(rk, province_id))),
        |                           x -> cast(x.province_id as string)))   as provinceids,
        |  concat_ws(",", transform(sort_array(collect_list(struct(rk, province_name))),
        |                           x -> x.province_name))                 as provincenames,
        |  concat_ws(",", transform(sort_array(collect_list(struct(rk, total_amount))),
        |                           x -> cast(x.total_amount as string)))  as provinceamount
        |from (
        |  select
        |    region_id,
        |    region_name,
        |    province_id,
        |    province_name,
        |    cast(total_amount as int) as total_amount,
        |    row_number() over (partition by region_id, region_name
        |                       order by cast(total_amount as int) desc) as rk
        |  from temp
        |) ranked
        |where rk <= 3
        |group by region_id, region_name
        |""".stripMargin)

    // Overwrite the target table in MySQL via JDBC.
    result.write.mode("overwrite")
      .jdbc("jdbc:mysql://192.168.40.110:3306/ds03?useSSL=false", "regiontopthree", mysqlProps)

    // Release the Spark session.
    spark.close()
  }

}
