package DianShang_2024.ds_03.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, current_timestamp, date_format, lit, to_timestamp, when}

import java.text.SimpleDateFormat
import java.util.Date

object clean05 {
  /**
   * Task 5 (data cleaning):
   * Extract yesterday's partition (the one produced by sub-task 1) of
   * ods03.order_info into the dynamic-partition table dwd03.fact_order_info.
   * Partition column etl_date is a string; create_time is reformatted to
   * yyyyMMdd; a null operate_time is filled with create_time; the four audit
   * columns dwd_insert_user / dwd_insert_time / dwd_modify_user /
   * dwd_modify_time are added ("user1" + current operation time, cast to the
   * declared column types).
   *
   * @param args optional: args(0) overrides the etl_date partition to process
   *             (defaults to "20240101", the partition from sub-task 1).
   */
  def main(args: Array[String]): Unit = {
    // Partition to extract; parameterized so the job can be reused for other
    // days without editing the source. Behavior is unchanged when no args
    // are supplied.
    val etlDate = if (args.nonEmpty) args(0) else "20240101"

    // Spark session with Hive support; nonstrict dynamic-partition mode is
    // required for "insert ... partition(etl_date)" without a static value.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗第五题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd03")

    // Recreate the target table: create_time must be declared as string
    // because the cleaned value has the non-timestamp format yyyyMMdd, which
    // would become null if written into a timestamp column. (Data inserted
    // before an ALTER of the column type stayed null, hence the drop+create.)
    spark.sql("drop table if exists fact_order_info")
    spark.sql(
      """
        |create table if not exists dwd03.fact_order_info(
        |id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time string,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2),
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned  by(etl_date string)
        |""".stripMargin)

    // ---- Approach 1: pure SQL ----
    // NOTE(review): the nested date_format follows the original author's note
    // that a single call did not produce yyyyMMdd for this source data; a
    // single date_format(create_time,"yyyyMMdd") is normally sufficient —
    // confirm against the actual ods data before simplifying.
    // The if() on operate_time reads the ORIGINAL create_time column of the
    // source row, so it is unaffected by the reformatted create_time alias.
    spark.sql(
      s"""
         |select
         |id,
         |consignee,
         |consignee_tel,
         |final_total_amount,
         |order_status,
         |user_id,
         |delivery_address,
         |order_comment,
         |out_trade_no,
         |trade_body,
         |date_format(date_format(create_time,"yyyy-MM-dd"),"yyyyMMdd")   as create_time,
         |if(operate_time is null,create_time,operate_time) as operate_time,
         |expire_time,
         |tracking_no,
         |parent_order_id,
         |img_url,
         |province_id,
         |benefit_reduce_amount,
         |original_total_amount,
         |feight_fee,
         |"user1" as dwd_insert_user,
         |to_timestamp(date_format(current_timestamp(),"yyyy-MM-dd HH:mm:ss"),"yyyy-MM-dd HH:mm:ss") as dwd_insert_time,
         |"user1" as dwd_modify_user,
         |to_timestamp(date_format(current_timestamp(),"yyyy-MM-dd HH:mm:ss"),"yyyy-MM-dd HH:mm:ss") as dwd_modify_time,
         |etl_date
         |from ods03.order_info
         |where etl_date='$etlDate'
         |""".stripMargin).createOrReplaceTempView("result")
    // (to_timestamp already returns a timestamp; the former
    //  cast(... as timestamp) wrapper was a no-op and has been dropped.)

    // Quick visual check of the reformatted create_time values.
    spark.sql("select create_time from result").show()

    // Load into the dynamic partition; insert overwrite replaces any previous
    // content of the target partition(s).
    spark.sql(
      """
        |insert overwrite table dwd03.fact_order_info
        |partition(etl_date)
        |select * from result
        |""".stripMargin)

    // ---- Approach 2: DataFrame API (same transformation, kept for demo) ----
    // NOTE(review): approach 1 already populated the partition, so this
    // append writes a second copy of every row — run only one approach in
    // production.
    spark.sql("select * from ods03.order_info")
      .where(col("etl_date") === etlDate)
      // Fill a null operate_time BEFORE create_time is reformatted, so the
      // original timestamp value is copied over.
      .withColumn(
        "operate_time",
        when(col("operate_time").isNull, col("create_time")).otherwise(col("operate_time"))
      )
      .withColumn(
        "create_time",
        date_format(col("create_time"), "yyyyMMdd").cast("string")
      )
      .withColumn("dwd_insert_user", lit("user1"))
      // FIX: this column was previously built with SimpleDateFormat + lit,
      // which produced a plain STRING instead of a timestamp — inconsistent
      // with dwd_modify_time below and with the table's timestamp column.
      .withColumn(
        "dwd_insert_time",
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")
      )
      .withColumn("dwd_modify_user", lit("user1"))
      .withColumn(
        "dwd_modify_time",
        // lit() around a Column was redundant; to_timestamp already yields one.
        to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")
      )
      .write.mode("append")
      .format("hive")
      .partitionBy("etl_date")
      .saveAsTable("fact_order_info") // resolves to dwd03 via the earlier "use dwd03"

    // Release the Spark session.
    spark.stop()
  }

}
