package DianShang_2024.ds_07.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_timestamp}

import java.text.SimpleDateFormat
import java.util.Calendar

object clean02 {

  /**
   * Data-cleaning task #2: promote the latest `ods07.sku_info` partition into the
   * `dwd07.dim_sku_info` dimension table, deduplicating by `id` (newest `create_time`
   * wins), while keeping the earliest insert timestamp and the latest modify timestamp
   * per key. The result is written into yesterday's `etl_date` partition.
   */
  def main(args: Array[String]): Unit = {
    // Local Spark session with Hive support, Kryo serialization and the Hudi extension.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗第二题")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd07")

    // Driver-side timestamps: "now" feeds dwd_insert_time for new rows;
    // yesterday (yyyyMMdd) names the target partition.
    val cal = Calendar.getInstance()
    val nowText = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(cal.getTime)
    cal.add(Calendar.DATE, -1)
    val partitionDate = new SimpleDateFormat("yyyyMMdd").format(cal.getTime)

    // Pick the max etl_date partition from ODS here; in the actual competition
    // the expected choice would be yesterday's partition instead.
    spark.table("ods07.sku_info").createOrReplaceTempView("temp01")

    // Audit columns appended in a fixed order — the order matters because the
    // later `union all` against the dwd view matches columns by position.
    val auditColumns = Seq(
      "dwd_insert_user" -> lit("user1"),
      "dwd_insert_time" -> to_timestamp(lit(nowText)),
      "dwd_modify_user" -> lit("user1"),
      // current_timestamp() truncated to second precision via format + re-parse.
      "dwd_modify_time" -> to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"))
    )

    val odsWithAudit = auditColumns.foldLeft(
      spark.table("ods07.sku_info")
        .where("etl_date=(select max(etl_date) from temp01)")
        .drop("etl_date")
    ) { case (df, (name, col)) => df.withColumn(name, col) }
    odsWithAudit.createOrReplaceTempView("sku_info")

    // Latest existing dimension snapshot, without its partition column.
    val latestDim = spark.sql(
      """
        |select * from dwd07.dim_sku_info
        |where etl_date=(select max(etl_date) from dwd07.dim_sku_info)
        |""".stripMargin)
    latestDim.drop("etl_date").createOrReplaceTempView("dwd")

    // Merge old + new rows, keep one row per id (newest create_time), and take
    // the earliest insert time / latest modify time across duplicates.
    spark.sql(
      s"""
        |insert into table dwd07.dim_sku_info
        |partition(etl_date='${partitionDate}')
        |select
        |id,spu_id,price,sku_name,sku_desc,weight,tm_id,category3_id,sku_default_img,create_time,
        |dwd_insert_user,dwd_insert_time,dwd_modify_user,dwd_modify_time
        |from(
        |select
        |id,spu_id,price,sku_name,sku_desc,weight,tm_id,category3_id,sku_default_img,create_time,
        |dwd_insert_user,
        |min(dwd_insert_time) over(partition by id) as dwd_insert_time,
        |dwd_modify_user,
        |max(dwd_modify_time) over(partition by id) as dwd_modify_time,
        |row_number() over(partition by id order by create_time desc) as row
        |from(
        |select * from dwd
        |union all
        |select * from sku_info
        |)as r1
        |) as r2
        |where row=1
        |""".stripMargin)

    /*  Ad-hoc verification query (run manually in a SQL shell):
        select id,sku_desc,dwd_insert_user,dwd_modify_time,etl_date from dwd07.dim_sku_info
        where etl_date=(select max(etl_date) from dwd07.dim_sku_info) and  15 <= id and id<=20;
     */

    spark.close()
  }

}
