package DianShang_2024.ds_01.clean

import org.apache.spark.sql.SparkSession

object clean05 {
  /*
      Subtask: extract yesterday's partition (produced by subtask 1) of the
      ods order_info table into the dynamically-partitioned table
      dwd.fact_order_info. The partition column is etl_date (String), whose
      value must be create_time reformatted as yyyyMMdd. If operate_time is
      null, fill it with create_time. Add four audit columns:
      dwd_insert_user / dwd_modify_user = "user1",
      dwd_insert_time / dwd_modify_time = the current operation time,
      with appropriate type conversion.
      Verify afterwards with hive cli: show partitions dwd.fact_order_info
   */
  def main(args: Array[String]): Unit = {
    // Optional first CLI argument selects which source partition to load.
    // Defaults to the original hard-coded date so existing invocations
    // (no arguments) behave exactly as before.
    val etlDate = args.headOption.getOrElse("20231017")

    // Build the SparkSession with Hive support so Hive tables can be
    // read and written through spark.sql(...).
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗第五题")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd")
    // Enable dynamic partitioning and relax the mode to nonstrict so the
    // partition value can come from the SELECT instead of being given
    // statically in the PARTITION clause.
    spark.sql("set hive.exec.dynamic.partition=true")
    spark.sql("set hive.exec.dynamic.partition.mode=nonstrict")

    // All transformation happens inline in the INSERT ... SELECT:
    //  - create_time is truncated to a date-level timestamp
    //  - operate_time falls back to create_time when null
    //  - etl_date (the dynamic partition value) is derived from create_time
    //    formatted as yyyyMMdd, per the requirement, rather than passing the
    //    source table's etl_date column through unchanged
    // NOTE(review): the source table here is ods.order_info2 while the spec
    // text says ods.order_info — confirm the actual table name in ods.
    spark.sql(
      s"""
         |insert into table dwd.fact_order_info
         |partition(etl_date)
         |select
         |id,
         |consignee,
         |consignee_tel,
         |final_total_amount,
         |order_status,
         |user_id,
         |delivery_address,
         |order_comment,
         |out_trade_no,
         |trade_body,
         |to_timestamp(date_format(create_time,'yyyy-MM-dd'),'yyyy-MM-dd') as create_time,
         |if(operate_time is null ,create_time,operate_time) as operate_time,
         |expire_time,
         |tracking_no,
         |parent_order_id,
         |img_url,
         |province_id,
         |benefit_reduce_amount,
         |original_total_amount,
         |feight_fee,
         |'user1' as dwd_insert_user,
         |to_timestamp(date_format(current_timestamp(),'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') as dwd_insert_time,
         |'user1' as dwd_modify_user,
         |to_timestamp(date_format(current_timestamp(),'yyyy-MM-dd HH:mm:ss'),'yyyy-MM-dd HH:mm:ss') as dwd_modify_time,
         |date_format(create_time,'yyyyMMdd') as etl_date
         |from ods.order_info2
         |where etl_date='$etlDate'
         |""".stripMargin)

    // Release Spark resources (close() also stops the underlying context).
    spark.close()
  }

}
