package DianShang_2024.ds_server.clean

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{current_timestamp, date_format, lit, to_timestamp}

object table_data_preparation {

  /**
   * ETL preparation job: copies tables from the Hive `ods_server` database
   * into the `dwd_server` database (created if absent), appending the four
   * standard DWD audit columns (dwd_insert_user / dwd_insert_time /
   * dwd_modify_user / dwd_modify_time). All audit timestamps are truncated
   * to whole seconds, i.e. `yyyy-MM-dd HH:mm:ss` with no milliseconds.
   *
   * Only `dim_customer_inf` and `dim_product_info` are loaded here; the two
   * fact tables are created but left empty because later tasks load them
   * with dynamic partitioning.
   *
   * @param args optional; `args(0)` overrides the target `etl_date`
   *             partition value (defaults to "20240311", the original
   *             hard-coded value, so existing invocations are unchanged)
   */
  def main(args: Array[String]): Unit = {

    // Target partition for the static-partition loads below.
    // Previously hard-coded; now overridable from the command line.
    val etlDate = if (args.nonEmpty) args(0) else "20240311"

    //  Build the SparkSession with Hive support so Hive tables can be read/written.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据清洗准备数据")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("create database if not exists dwd_server")
    spark.sql("use dwd_server")
    spark.sql("show databases").show

    // Current wall-clock time truncated to whole seconds. Computed once and
    // reused for both audit columns; the original duplicated this expression
    // and inconsistently called current_timestamp with and without parens.
    val auditTimestamp =
      to_timestamp(
        date_format(current_timestamp(), "yyyy-MM-dd HH:mm:ss"),
        "yyyy-MM-dd HH:mm:ss"
      )

    //  Create the four DWD target tables (idempotent: all use IF NOT EXISTS).
    def create_table(): Unit = {
      //  dim_customer_inf
      spark.sql(
        """
          |create table if not exists dwd_server.dim_customer_inf(
          |customer_inf_id int,
          |customer_id int,
          |customer_name string,
          |identity_card_type int,
          |identity_card_no string,
          |mobile_phone string,
          |customer_email string,
          |gender string,
          |customer_point int,
          |register_time timestamp,
          |birthday timestamp,
          |customer_level int,
          |customer_money decimal(8,2),
          |modified_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  dim_product_info
      spark.sql(
        """
          |create table if not exists dwd_server.dim_product_info(
          |product_id int,
          |product_core string,
          |product_name string,
          |bar_code string,
          |brand_id int,
          |one_category_id int,
          |two_category_id int,
          |three_category_id int,
          |supplier_id int,
          |price decimal(8,2),
          |average_cost decimal(18,2),
          |publish_status int,
          |audit_status int,
          |weight double,
          |length double,
          |height double,
          |width double,
          |color_type string,
          |production_date timestamp,
          |shelf_life int,
          |descript string,
          |indate timestamp,
          |modified_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  fact_order_master (created empty here; loaded by a later task)
      spark.sql(
        """
          |create table if not exists dwd_server.fact_order_master(
          |order_id int,
          |order_sn string,
          |customer_id int,
          |shipping_user string,
          |province string,
          |city string,
          |address string,
          |order_source int,
          |payment_method int,
          |order_money decimal(8,2),
          |district_money decimal(8,2),
          |shipping_money decimal(8,2),
          |payment_money decimal(8,2),
          |shipping_comp_name string,
          |shipping_sn string,
          |create_time string,
          |shipping_time string,
          |pay_time string,
          |receive_time string,
          |order_status string,
          |order_point int,
          |invoice_title string,
          |modified_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  fact_order_detail (created empty here; loaded by a later task)
      spark.sql(
        """
          |create table if not exists dwd_server.fact_order_detail(
          |order_detail_id int,
          |order_sn string,
          |product_id int,
          |product_name string,
          |product_cnt int,
          |product_price decimal(8,2),
          |average_cost decimal(8,2),
          |weight double,
          |fee_money decimal(8,2),
          |w_id int,
          |create_time string,
          |modified_time timestamp,
          |dwd_insert_user string,
          |dwd_insert_time timestamp,
          |dwd_modify_user string,
          |dwd_modify_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)
    }

    //  Create the target tables inside dwd_server.
    create_table()

    /**
     * Reads `ods_server.<table_name>`, drops its etl_date partition column,
     * appends the four DWD audit columns, and registers the result as the
     * temp view `view_name`. Column order (ods columns then audit columns)
     * must match the DWD table schema, since write_hive inserts with SELECT *.
     */
    def read_hive(table_name: String, view_name: String): Unit = {
      spark.sql(
        s"""
          |select
          |*
          |from ods_server.$table_name
          |""".stripMargin)
        .drop("etl_date")
        .withColumn("dwd_insert_user", lit("user1"))
        .withColumn("dwd_insert_time", auditTimestamp)
        .withColumn("dwd_modify_user", lit("user1"))
        .withColumn("dwd_modify_time", auditTimestamp)
        .createOrReplaceTempView(view_name)
    }

    /**
     * Inserts every row of the temp view `view_name` into
     * `dwd_server.<table_name>` under the static partition etl_date=etlDate.
     */
    def write_hive(table_name: String, view_name: String): Unit = {
      spark.sql(
        s"""
          |insert into table dwd_server.$table_name
          |partition(etl_date='$etlDate')
          |select
          |*
          |from $view_name
          |""".stripMargin)
    }

    //  Load the two dimension tables. The two fact tables are intentionally
    //  skipped: later tasks load them with dynamic partitioning.
    read_hive("customer_inf01", "customer_inf_temp")
    write_hive("dim_customer_inf", "customer_inf_temp")

    read_hive("product_info01", "product_info_temp")
    write_hive("dim_product_info", "product_info_temp")

    //  Tear down the SparkSession.
    spark.close()
  }

}
