package DianShang_2024.ds_07.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, lit, to_timestamp}

import java.text.SimpleDateFormat
import java.util.Calendar

object table_perparation { // NOTE(review): name is a typo of "table_preparation"; kept unchanged so external callers/build scripts still resolve it

  /**
   * DWD-layer table preparation job.
   *
   * Creates the `fact_order_info` table in the `dwd07` database and (via the
   * commented-out calls at the bottom) loads the newest ODS partition of each
   * source table into its DWD counterpart, stamping audit columns and writing
   * into yesterday's `etl_date` partition. Runs as a local Spark job with Hive
   * support enabled.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("表格准备")
      // Target partitions are derived from data at runtime, so Hive dynamic
      // partitioning must be allowed in nonstrict mode.
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
//      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
//      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    // The target database must already exist (create it beforehand in the
    // Hive or Spark SQL client).
    spark.sql("use dwd07")

    //  dim_user_info
//    spark.sql("drop table if exists dim_user_info")
//    spark.sql(
//      """
//        |create table if not exists dim_user_info(
//        |id int,
//        |login_name string,
//        |nick_name string,
//        |passwd string,
//        |name string,
//        |phone_num string,
//        |email string,
//        |head_img string,
//        |user_level string,
//        |birthday timestamp,
//        |gender string,
//        |create_time timestamp,
//        |operate_time timestamp,
//        |dwd_insert_user string,
//        |dwd_insert_time timestamp,
//        |dwd_modify_user string,
//        |dwd_modify_time timestamp
//        |)
//        |partitioned by(etl_date string)
//        |""".stripMargin)
//    // dim_sku_info
//    spark.sql("drop table if exists dim_sku_info")
//    spark.sql(
//      """
//        |create table if not exists dim_sku_info(
//        |id int,
//        |spu_id int,
//        |price decimal(10,0),
//        |sku_name string,
//        |sku_desc string,
//        |weight decimal(10,2),
//        |tm_id int,
//        |category3_id int,
//        |sku_default_img string,
//        |create_time timestamp,
//        |dwd_insert_user string,
//        |dwd_insert_time timestamp,
//        |dwd_modify_user string,
//        |dwd_modify_time timestamp
//        |)
//        |partitioned by(etl_date string)
//        |""".stripMargin)
//    //  dim_province
//    spark.sql("drop table if exists dim_province")
//    spark.sql(
//      """
//        |create table if not exists dim_province(
//        |id int,
//        |name string,
//        |region_id string,
//        |area_code string,
//        |iso_code string,
//        |create_time timestamp,
//        |dwd_insert_user string,
//        |dwd_insert_time timestamp,
//        |dwd_modify_user string,
//        |dwd_modify_time timestamp
//        |)
//        |partitioned by(etl_date string)
//        |""".stripMargin)
//    //  dim_region
//    spark.sql("drop table if exists dim_region")
//    spark.sql(
//      """
//        |create table if not exists dim_region(
//        |id string,
//        |region_name string,
//        |create_time timestamp,
//        |dwd_insert_user string,
//        |dwd_insert_time timestamp,
//        |dwd_modify_user string,
//        |dwd_modify_time timestamp
//        |)
//        |partitioned by(etl_date string)
//        |""".stripMargin)
    //  fact_order_info — rebuilt from scratch on every run
    spark.sql("drop table if exists fact_order_info")
    spark.sql(
      """
        |create table if not exists fact_order_info(
        |id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2),
        |dwd_insert_user string,
        |dwd_insert_time timestamp,
        |dwd_modify_user string,
        |dwd_modify_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)
    //  fact_order_detail
//    spark.sql("drop table if exists fact_order_detail")
//    spark.sql(
//      """
//        |create table if not exists fact_order_detail(
//        |id int,
//        |order_id int,
//        |sku_id int,
//        |sku_name string,
//        |img_url string,
//        |order_price decimal(10,2),
//        |sku_num string,
//        |create_time timestamp,
//        |source_type string,
//        |source_id int,
//        |dwd_insert_user string,
//        |dwd_insert_time timestamp,
//        |dwd_modify_user string,
//        |dwd_modify_time timestamp
//        |)
//        |partitioned by(etl_date string)
//        |""".stripMargin)

    spark.sql("show tables").show

    // Audit timestamp written into the dwd_* columns, plus yesterday's date
    // (yyyyMMdd) used as the target etl_date partition value.
    val day: Calendar = Calendar.getInstance()
    val current_time = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(day.getTime)
    day.add(Calendar.DATE, -1)
    val yesterday = new SimpleDateFormat("yyyyMMdd").format(day.getTime)

    /**
     * Copies the newest ODS partition of `ods07.ods_name` into `dwd07.dwd_name`.
     *
     * Only rows from the maximum `etl_date` partition of the source are read;
     * they are stamped with dwd audit columns, re-keyed to yesterday's date
     * and appended to the Hive-format target table, partitioned by `etl_date`.
     */
    def to_dwd(ods_name: String, dwd_name: String): Unit = {
      // Resolve the newest source partition up front instead of embedding a
      // scalar subquery in a Dataset filter string: the subquery form read the
      // table twice and is fragile across Spark versions. If the source table
      // is empty, maxEtlDate is null and the filter matches no rows — the same
      // outcome as the original subquery comparison.
      val maxEtlDate = spark.sql(s"select max(etl_date) from ods07.${ods_name}").first().getString(0)
      spark.table(s"ods07.${ods_name}")
        .where(col("etl_date") === maxEtlDate)
        .drop("etl_date")
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", to_timestamp(lit(current_time)))
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", to_timestamp(lit(current_time)))
        .withColumn("etl_date", lit(yesterday))
        .write.mode("append")
        .format("hive")
        .partitionBy("etl_date")
        .saveAsTable(s"dwd07.${dwd_name}")
    }

//    to_dwd("user_info","dim_user_info")
//    to_dwd("sku_info","dim_sku_info")
//    to_dwd("base_province","dim_province")
//    to_dwd("base_region","dim_region")



    spark.close()
  }

}
