package com.xl.competition.modul_b.task2

import org.apache.spark.sql.SparkSession

/**
 * @author: xl
 * @createTime: 2023/11/16 23:25:28
 * @program: com.xl.competition
 * @description: Merges ods.base_province into dim.dim_base_province, keeping the newest
 *               version of each province row (by create_time) and stamping audit columns.
 */
// NOTE(review): object name has a typo ("DIm" vs "Dim") but is kept unchanged —
// renaming would break spark-submit scripts referencing this main class.
object LoadBaseProvinceToDIm {

  /**
   * Incrementally merges the ODS snapshot of base_province into the dimension table.
   *
   * For every province id, the row with the newer `create_time` wins (ODS row vs the
   * existing dim row). `dwd_insert_time` is preserved from the dim side when present;
   * `dwd_modify_time` is always refreshed to "now".
   *
   * @param args optional overrides: args(0) = dim-side etl_date partition to read
   *             (default "20231112"), args(1) = ods-side etl_date partition to read
   *             and the dim partition to write (default "20231113").
   *             Defaults preserve the original hard-coded behaviour.
   */
  def main(args: Array[String]): Unit = {
    val dimEtlDate = if (args.length > 0) args(0) else "20231112"
    val odsEtlDate = if (args.length > 1) args(1) else "20231113"

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      // Hive-compatible parquet timestamps so Hive can read what Spark writes.
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .getOrCreate()

    // Left join ODS (driving side) against the current dim partition; pick the newer
    // row per column via `if`. The nvl epoch fallback makes brand-new ids (dim side
    // all NULL after the left join) always take the ODS values.
    // FIX: the fallback literal was '1970-01-01 00:00:0' (missing a seconds digit);
    // casting that malformed string to timestamp can yield NULL, which makes the
    // comparison NULL => `if` picks the NULL dim columns for new rows.
    spark.sql(
      s"""
         |with dbp as (
         |    select id,
         |           name,
         |           region_id,
         |           area_code,
         |           iso_code,
         |           iso_3166_2,
         |           create_time,
         |           dwd_insert_user,
         |           dwd_insert_time,
         |           dwd_modify_user,
         |           dwd_modify_time
         |    from dim.dim_base_province
         |    where etl_date = '$dimEtlDate'
         |),
         |     obp as (
         |         select id,
         |                name,
         |                region_id,
         |                area_code,
         |                iso_code,
         |                iso_3166_2,
         |                create_time,
         |                'user1'                                                dwd_insert_user,
         |                'user1'                                                dwd_modify_user,
         |                from_unixtime(unix_timestamp(), 'yyyy-MM-dd HH:mm:ss') dwd_insert_time,
         |                from_unixtime(unix_timestamp(), 'yyyy-MM-dd HH:mm:ss') dwd_modify_time
         |         from ods.base_province
         |         where etl_date = '$odsEtlDate'
         |     )
         |insert into table dim.dim_base_province partition (etl_date = '$odsEtlDate')
         |select `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.id, dbp.id)                   as id,
         |       `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.name, dbp.name)               as name,
         |       `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.region_id, dbp.region_id)     as region_id,
         |       `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.area_code, dbp.area_code)     as area_code,
         |       `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.iso_code, dbp.iso_code)       as iso_code,
         |       `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.iso_3166_2, dbp.iso_3166_2)   as iso_3166_2,
         |       `if`(obp.create_time > nvl(dbp.create_time, cast('1970-01-01 00:00:00' as timestamp)), obp.create_time, dbp.create_time) as create_time,
         |       'user1'                                                                                                                 as dwd_insert_user,
         |       nvl(dbp.dwd_insert_time, substr(current_timestamp(), 1, 19))                                                            as dwd_insert_time,
         |       'user1'                                                                                                                 as dwd_modify_user,
         |       substr(current_timestamp(), 1, 19)                                                                                      as dwd_modify_time
         |from obp
         |         left join dbp
         |                   on obp.id = dbp.id
         |""".stripMargin)

    spark.stop()
  }
}
