package com.xl.competition.modul_b.task2

import org.apache.spark.sql.SparkSession

/**
 * @author: xl
 * @createTime: 2023/11/16 23:28:12
 * @program: com.xl.competition
 * @description: Merges ods.base_region into the dim.dim_base_region dimension table for a given etl_date partition.
 */
object LoadBaseRegionToDim {

  /**
   * Incrementally merges `ods.base_region` into the `dim.dim_base_region`
   * dimension table for one etl_date partition.
   *
   * Merge rule (right join, ODS side drives the row set): when a region id
   * exists in both layers, the side with the newer `create_time` wins;
   * `dwd_insert_time` is kept from the existing dim row when present and set
   * to the load time otherwise, while `dwd_modify_time` is always refreshed.
   *
   * @param args optional; args(0) overrides the target partition date
   *             (format yyyyMMdd, defaults to "20231116" for backward
   *             compatibility with the original hard-coded run)
   */
  def main(args: Array[String]): Unit = {
    import java.time.LocalDate
    import java.time.format.DateTimeFormatter

    // Target partition to load; the previous day's dim partition is the merge base.
    // Deriving prevEtlDate avoids the original bug where the ODS filter was
    // hard-coded to '20231216' (a month typo) while the target partition was '20231116'.
    val etlDate: String = if (args.nonEmpty) args(0) else "20231116"
    val fmt = DateTimeFormatter.ofPattern("yyyyMMdd")
    val prevEtlDate: String = LocalDate.parse(etlDate, fmt).minusDays(1).format(fmt)

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      // Write parquet timestamps/decimals in the legacy Hive-compatible format.
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .getOrCreate()

    spark.sql(
      s"""
        |insert into table dim.dim_base_region partition (etl_date = '$etlDate')
        |select if(obr.create_time > nvl(dbr.create_time, cast('1970-01-01 00:00:00' as timestamp)), obr.id, dbr.id)                   as id,
        |       if(obr.create_time > nvl(dbr.create_time, cast('1970-01-01 00:00:00' as timestamp)), obr.region_name, dbr.region_name) as region_name,
        |       if(obr.create_time > nvl(dbr.create_time, cast('1970-01-01 00:00:00' as timestamp)), obr.create_time, dbr.create_time) as create_time,
        |       'user1'                                                                                                                as dwd_insert_user,
        |       nvl(dbr.dwd_insert_time, substr(current_timestamp(), 1, 19))                                                           as dwd_insert_time,
        |       'user1'                                                                                                                as dwd_modify_user,
        |       substr(current_timestamp(), 1, 19)                                                                                     as dwd_modify_time
        |from (
        |         -- Existing dim rows from the previous partition; only the
        |         -- columns referenced by the outer select are projected.
        |         select id,
        |                region_name,
        |                create_time,
        |                dwd_insert_time
        |         from dim.dim_base_region
        |         where etl_date = '$prevEtlDate'
        |     ) dbr
        |         right join (
        |    -- Incoming ODS rows for the target partition.
        |    select id,
        |           region_name,
        |           create_time
        |    from ods.base_region
        |    where etl_date = '$etlDate'
        |) obr
        |                    on dbr.id = obr.id
        |""".stripMargin)

    spark.stop()
  }
}
