package com.xl.competition.modul_b.task2

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * @author: xl
 * @createTime: 2023/11/16 23:30:07
 * @program: com.xl.competition
 * @description: Loads an etl_date partition of ods.order_detail into dwd.fact_order_detail with DWD audit columns.
 */
object LoadOrderDetailToDwd {

  /**
   * Loads one `etl_date` partition of `ods.order_detail` into the DWD fact
   * table `dwd.fact_order_detail`, re-deriving `etl_date` from `create_time`
   * and stamping the four DWD audit columns.
   *
   * @param args optional; args(0) = source partition to load, `yyyyMMdd`
   *             format. Defaults to "20231117" so existing zero-argument
   *             invocations behave exactly as before.
   */
  def main(args: Array[String]): Unit = {
    // Allow the partition date to be supplied on the command line instead of
    // being hard-coded; fall back to the original constant for compatibility.
    val etlDate: String = if (args.nonEmpty) args(0) else "20231117"

    val spark: SparkSession = SparkSession
      .builder()
      .master("local[*]")
      .appName(this.getClass.getName)
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://node2:9083")
      // Legacy parquet format so Hive can read the files Spark writes.
      .config("spark.sql.parquet.writeLegacyFormat", "true")
      .getOrCreate()

    // Select the ODS partition, normalize create_time to yyyyMMdd, and add
    // the DWD audit columns (insert/modify user + truncated timestamp).
    val orderDetailDF: DataFrame = spark.sql(
      s"""
         |select id,
         |       order_id,
         |       sku_id,
         |       sku_name,
         |       img_url,
         |       order_price,
         |       sku_num,
         |       from_unixtime(unix_timestamp(create_time),'yyyyMMdd') as  create_time,
         |       from_unixtime(unix_timestamp(create_time),'yyyyMMdd') as  etl_date,
         |       source_type,
         |       source_id,
         |       split_total_amount,
         |       split_activity_amount,
         |       split_coupon_amount,
         |       'user1'                            dwd_insert_user,
         |       substr(current_timestamp(), 1, 19) dwd_insert_time,
         |       'user1'                            dwd_modify_user,
         |       substr(current_timestamp(), 1, 19) dwd_modify_time
         |from ods.order_detail
         |where etl_date = '$etlDate'
         |""".stripMargin)

    // NOTE(review): Append duplicates rows if the job is re-run for the same
    // partition; consider dynamic partition overwrite if reruns are expected.
    orderDetailDF
      .write
      .mode(SaveMode.Append)
      .partitionBy("etl_date")
      .format("hive")
      .saveAsTable("dwd.fact_order_detail")

    spark.stop()
  }
}
