package com.xl.competition.modul_b.task3

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import java.util.Properties

/**
 * @author: xl
 * @createTime: 2023/11/17 21:24:05
 * @program: com.xl.competition
 * @description: Compares each province's average order consumption for 2023-11
 *               against the overall average and appends the labelled result to
 *               the MySQL table shtd_result.provinceavgcmp.
 */
object LoadProvinceavgcmpToMysql {

  /**
   * Entry point. Reads order facts from the Hive warehouse, computes each
   * province's average order amount for November 2023 alongside the global
   * average across all provinces, labels every province as higher (高),
   * lower (低) or equal (相同), and appends the result to the MySQL table
   * `shtd_result.provinceavgcmp`.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    // Spark session wired to the cluster's Hive metastore; legacy parquet
    // writer format is kept on for compatibility with older Hive readers.
    val session: SparkSession = SparkSession.builder()
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .getOrCreate()

    // Inner query: per-province average plus a scalar subquery for the
    // all-province average; outer query derives the comparison label.
    // NOTE(review): substr(create_time, 0, 6) assumes create_time begins
    // with 'yyyyMM' — confirm against the fact table's timestamp format.
    val comparisonSql =
      """
        |select provinceid,
        |       provincename,
        |       provinceavgconsumption,
        |       allprovinceavgconsumption,
        |       case
        |           when provinceavgconsumption > allprovinceavgconsumption then '高'
        |           when provinceavgconsumption < allprovinceavgconsumption then '低'
        |           else '相同' end comparison
        |from (
        |         select province_id       provinceid,
        |                pro.name          provincename,
        |                avg(total_amount) provinceavgconsumption,
        |                (
        |                    select sum(total_amount) / count(province_id)
        |                    from dwd.fact_order_info
        |                )                 allprovinceavgconsumption
        |         from dwd.fact_order_info foi
        |                  join dim.dim_base_province pro
        |                       on province_id = pro.id
        |         where substr(foi.create_time, 0, 6) = '202311'
        |         group by province_id, pro.name
        |     ) t1
        |""".stripMargin

    val comparisonDf: DataFrame = session.sql(comparisonSql)

    // JDBC connection credentials.
    // NOTE(review): hard-coded root password — consider moving to external
    // configuration / a secrets store before production use.
    val jdbcProps = new Properties()
    jdbcProps.put("user", "root")
    jdbcProps.put("password", "Abc123..")

    // Append keeps any rows already present in the target table.
    comparisonDf.write
      .mode(SaveMode.Append)
      .jdbc("jdbc:mysql://node3:3306/shtd_result", "provinceavgcmp", jdbcProps)

    session.stop()
  }
}
