package com.xl.competition.modul_b.task3

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

import java.util.Properties

/**
 * @author: xl
 * @createTime: 2023/11/17 20:44:15
 * @program: com.xl.competition
 * @description: Aggregates order totals per province/region/month from Hive
 *               (dwd/dim layers) and appends the result to the MySQL table
 *               shtd_result.provinceeverymonth.
 */
object LoadOrderToMysql {

  /**
   * Entry point: computes total consumption and order count per
   * (province, region, year, month) and appends the result to MySQL.
   *
   * Reads dim.dim_base_province / dim.dim_base_region / dwd.fact_order_info
   * from Hive; writes to shtd_result.provinceeverymonth over JDBC.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession
      .builder()
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      // Needed so downstream Hive/Impala can read parquet decimals/timestamps.
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .getOrCreate()

    // NOTE(review): assumes create_time is a yyyyMMdd-style string
    // (e.g. "20231116", per the inline sample) — confirm against dwd schema.
    // Hive/Spark substr treats start position 0 the same as 1, so
    // substr(create_time, 0, 4) yields the 4-digit year.
    val dataFrame: DataFrame = spark.sql(
      """
        |with area as (
        |    select pro.id          provinceid,
        |           pro.name        provincename,
        |           reg.id          regionid,
        |           reg.region_name regionname
        |    from dim.dim_base_province pro
        |    join dim.dim_base_region reg
        |           on pro.region_id = reg.id
        |),
        |     ori as (
        |         select id,
        |                total_amount,
        |                province_id,
        |                create_time
        |         from dwd.fact_order_info
        |     )
        |select provinceid,
        |       provincename,
        |       regionid,
        |       regionname,
        |       cast(sum(total_amount) as double) totalconsumption,
        |       count(id)                         totalorder,
        |       --20231116
        |       substr(create_time, 0, 4)         year,
        |       substr(create_time, 5, 2)         month
        |from ori
        |join area
        |       on province_id = provinceid
        |group by provinceid, provincename, regionid, regionname, substr(create_time, 0, 4), substr(create_time, 5, 2)
        |""".stripMargin)
    // Bug fix: substr(create_time, 5) had no length argument, so "month"
    // was "1116" (month + day) for a 20231116-style value instead of "11".
    // substr(create_time, 5, 2) extracts exactly the two month digits.

    // JDBC connection credentials.
    // SECURITY(review): credentials are hard-coded; prefer passing them via
    // args/config or a secrets mechanism in production code.
    val prop = new Properties()
    prop.put("user", "root")
    prop.put("password", "Abc123..")

    // Append keeps any previously loaded months in the result table.
    dataFrame.write
      .mode(SaveMode.Append)
      .jdbc("jdbc:mysql://node3:3306/shtd_result", "provinceeverymonth", prop)

    spark.stop()
  }
}
