package DianShang_2024.ds_03.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit}

import java.text.SimpleDateFormat
import java.util.Date

/**
 * One-off preparation job: (re)creates the DWD-layer dimension and fact
 * tables in Hive database `dwd03`, then seeds the dimension tables from the
 * ODS layer (`ods03`) partition `etl_date='20240101'`, adding the standard
 * DWD audit columns (dwd_insert_user/time, dwd_modify_user/time).
 *
 * The fact tables (order_info / order_detail) are only created, not seeded:
 * the downstream exercises insert their own data, so no window-function
 * merge against historical data is needed for them.
 */
object data_table_preparation {
  def main(args: Array[String]): Unit = {

    // Spark session with Hive support; nonstrict dynamic-partition mode is
    // required so partitions can be chosen at insert time.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("准备表和数据")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use dwd03")

    //  dim_user_info
    spark.sql("drop table if exists dwd03.dim_user_info")
    spark.sql(
      """
        |create table if not exists dwd03.dim_user_info(
        |id int,
        |login_name string,
        |nick_name string,
        |passwd string,
        |name string,
        |phone_num string,
        |email string,
        |head_img string,
        |user_level string,
        |birthday timestamp,
        |gender string,
        |create_time timestamp,
        |operate_time timestamp,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  dim_sku_info
    spark.sql("drop table if exists dwd03.dim_sku_info")
    spark.sql(
      """
        |create table if not exists dwd03.dim_sku_info(
        |id int,
        |spu_id int,
        |price decimal(10,0),
        |sku_name string,
        |sku_desc string,
        |weight decimal(10,2),
        |tm_id int,
        |category3_id int,
        |sku_default_img string,
        |create_time timestamp,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  dim_province
    spark.sql("drop table if exists dwd03.dim_province")
    spark.sql(
      """
        |create table if not exists dwd03.dim_province(
        |id int,
        |name string,
        |region_id string,
        |area_code string,
        |iso_code string,
        |create_time timestamp,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  dim_region
    spark.sql("drop table if exists dwd03.dim_region")
    spark.sql(
      """
        |create table if not exists dwd03.dim_region(
        |id string,
        |region_name string,
        |create_time timestamp,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  fact_order_info
    //  FIX: qualify with dwd03. for consistency with the other drops
    //  (previously relied on the earlier `use dwd03`).
    spark.sql("drop table if exists dwd03.fact_order_info")
    spark.sql(
      """
        |create table if not exists dwd03.fact_order_info(
        |id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2),
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  fact_order_detail
    //  FIX: the drop previously targeted the misspelled "dwd03.fact_order_",
    //  so an existing fact_order_detail was never dropped and the create
    //  below could silently keep a stale schema.
    spark.sql("drop table if exists dwd03.fact_order_detail")
    spark.sql(
      """
        |create table if not exists dwd03.fact_order_detail(
        |id int,
        |order_id int,
        |sku_id int,
        |sku_name string,
        |img_url string,
        |order_price decimal(10,2),
        |sku_num string,
        |create_time timestamp,
        |source_type string,
        |source_id int,
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  Verify all tables were created.
    spark.sql("show tables").show

    /**
     * Copies one ODS table's old partition into the corresponding DWD table,
     * appending the four DWD audit columns.
     *
     * @param odsTable source table name in database ods03
     *                 (read from partition etl_date='20240101' — the OLD partition)
     * @param dwdTable target table name in database dwd03
     *                 (written to partition etl_date='20240101' — the NEW partition)
     *
     * NOTE: the insert is positional (`select * from temp`), so the audit
     * columns must be appended in exactly the order declared in the DDL above.
     */
    def write_dwd(odsTable: String, dwdTable: String): Unit = {
      // Format the audit timestamp once so insert/modify times are identical.
      val auditTs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())

      spark.sql(s"select * from ods03.${odsTable} where etl_date='20240101'")
        .drop("etl_date")
        .withColumn("dwd_insert_user", lit("user1"))
        // FIX: column was misspelled "dwd_insrt_time", mismatching the
        // table schema column dwd_insert_time.
        .withColumn("dwd_insert_time", lit(auditTs).cast("timestamp"))
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", lit(auditTs).cast("timestamp"))
        .createOrReplaceTempView("temp")

      spark.sql(
        s"""
           |insert overwrite table dwd03.${dwdTable}
           |partition(etl_date='20240101')
           |select * from temp
           |""".stripMargin)
    }

    //  Seed the dimension tables from the ODS layer.
    write_dwd("user_info", "dim_user_info")
    write_dwd("sku_info", "dim_sku_info")
    write_dwd("base_province", "dim_province")
    write_dwd("base_region", "dim_region")

    //  Release the Spark session.
    spark.close()
  }

}
