package DianShang_2024.ds_01.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_timestamp}

import java.text.SimpleDateFormat
import java.util.Date

object data_preparation {
  def main(args: Array[String]): Unit = {
    /*
     * Full-extract the corresponding ods tables into Hive's dwd database.
     * All timestamp columns must follow "yyyy-MM-dd HH:mm:ss" (no milliseconds);
     * rows that only carry a date get "00:00:00" for the time part, which the
     * timestamp cast produces automatically.
     */

    // Batch (partition) date being processed. Overridable via args(0) so the
    // job can be reused for other batches; default keeps the original behavior.
    val etlDate = if (args.nonEmpty) args(0) else "20231017"

    // Spark session with Hive support so spark.sql() reads/writes Hive tables.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗准备表格数据")
      .enableHiveSupport()
      .getOrCreate()

    // Ensure the target database exists and is current.
    spark.sql("create database if not exists dwd")
    spark.sql("use dwd")
    spark.sql("show databases").show()

    // Idempotent DDL for every target dwd table (create-if-not-exists only;
    // no drops, so previously loaded partitions are preserved across runs).
    def create_table(): Unit = {
      //  dim_user_info
      spark.sql(
        """
          |create table if not exists dwd.dim_user_info(
          |id int,
          |login_name string,
          |nick_name string,
          |passwd string,
          |name string,
          |phone_num string,
          |email string,
          |head_img string,
          |user_level string,
          |birthday timestamp,
          |gender string,
          |create_time timestamp,
          |operate_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  dim_sku_info
      spark.sql(
        """
          |create table if not exists dwd.dim_sku_info(
          |id int,
          |spu_id int,
          |price decimal(10,0),
          |sku_name string,
          |sku_desc string,
          |weight decimal(10,2),
          |tm_id int,
          |category3_id int,
          |sku_default_img string,
          |create_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  dim_province
      spark.sql(
        """
          |create table if not exists dwd.dim_province(
          |id int,
          |name string,
          |region_id string,
          |area_code string,
          |iso_code string,
          |create_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  dim_region
      //  (the former "drop table if exists" was removed: it was inconsistent
      //  with the other tables and wiped already-loaded partitions each run)
      spark.sql(
        """
          |create table if not exists dwd.dim_region(
          |id string,
          |region_name string,
          |create_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  fact_order_info ("orerate_time" typo fixed to operate_time to match
      //  the ods column naming used by the other tables)
      spark.sql(
        """
          |create table if not exists dwd.fact_order_info(
          |id int,
          |consignee string,
          |consignee_tel string,
          |final_total_amount decimal(16,2),
          |order_status string,
          |user_id int,
          |delivery_address string,
          |order_comment string,
          |out_trade_no string,
          |trade_body string,
          |create_time timestamp,
          |operate_time timestamp,
          |expire_time timestamp,
          |tracking_no string,
          |parent_order_id int,
          |img_url string,
          |province_id int,
          |benefit_reduce_amount decimal(16,2),
          |original_total_amount decimal(16,2),
          |feight_fee decimal(16,2),
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  fact_order_detail
      spark.sql(
        """
          |create table if not exists dwd.fact_order_detail(
          |id int,
          |order_id int,
          |sku_id int,
          |sku_name string,
          |img_url string,
          |order_price decimal(10,2),
          |sku_num string,
          |create_time timestamp,
          |source_type string,
          |source_id int,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)
    }

    //  Create the target tables before loading.
    create_table()

    // Current wall-clock time truncated to whole seconds ("yyyy-MM-dd HH:mm:ss"),
    // as required for the dwd audit columns. Built once so insert/modify times
    // are populated by the same mechanism (the original mixed a driver-side
    // SimpleDateFormat with current_timestamp() for the same contract).
    val auditTime =
      to_timestamp(date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"), "yyyy-MM-dd HH:mm:ss")

    /**
     * Reads one ods table's partition for the current batch date, drops the
     * partition column, appends the four dwd audit columns, and registers the
     * result as a temp view named `view_table`.
     */
    def read_hive(table_name: String, view_table: String): Unit = {
      spark.sql(s"select * from ods.$table_name where etl_date='$etlDate'")
        .drop("etl_date")
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", auditTime)
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", auditTime)
        .createOrReplaceTempView(view_table)
    }

    /**
     * Appends all rows of the temp view `view_name` into the dwd table
     * `table_name`, under the static partition for the current batch date.
     */
    def write_hive(table_name: String, view_name: String): Unit = {
      spark.sql(
        s"""
           |insert into table dwd.$table_name
           |partition(etl_date='$etlDate')
           |select * from $view_name
           |""".stripMargin)
    }

    //  Extract each ods table and load it into its dwd counterpart.
    //  NOTE(review): fact_order_info / fact_order_detail are created above but
    //  never loaded here — confirm whether they are handled by another job.
    read_hive("user_info2", "user_info_temp")
    write_hive("dim_user_info", "user_info_temp")

    read_hive("sku_info2", "sku_info_temp")
    write_hive("dim_sku_info", "sku_info_temp")

    read_hive("base_province2", "base_province_temp")
    write_hive("dim_province", "base_province_temp")

    read_hive("base_region2", "base_region_temp")
    write_hive("dim_region", "base_region_temp")

    //  Release the Spark session.
    spark.close()
  }

}
