package DianShang_2024.ds_03.extract

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit

import java.text.SimpleDateFormat
import java.util.{Date, Properties}

object table_data_preparation {

  /**
   * One-off preparation job: (re)creates the ODS tables in Hive and performs a
   * full extract of the MySQL source database `shtd_store` into them.
   *
   * Every table is dropped, recreated, and loaded into the static partition
   * etl_date=20240101 using `insert overwrite`, so the job is idempotent and
   * safe to re-run.
   */
  def main(args: Array[String]): Unit = {
    //  Build a local Spark session with Hive support so DDL/DML below goes
    //  through the Hive metastore.
    val spark=SparkSession.builder()
      .master("local[*]")
      .appName("准备表和数据")
//      .config("hive.exec.dynamic.partition.mode","nonstrict")           //  dynamic partitioning; not needed here since all inserts use a static partition
      .enableHiveSupport()
      .getOrCreate()

    //  Shared extraction settings, kept in one place so the JDBC endpoint and
    //  the load date only have to change here.
    val jdbcUrl = "jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false"
    val etlDate = "20240101"

    //  JDBC connection properties for the MySQL source.
    //  NOTE(review): credentials are hard-coded; externalize them before using
    //  this anywhere beyond a classroom/sandbox environment.
    val mysql_connect=new Properties()
    mysql_connect.setProperty("user","root")
    mysql_connect.setProperty("password","123456")
    //  `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x class name; use
    //  `com.mysql.cj.jdbc.Driver` if the 8.x driver jar is on the classpath.
    mysql_connect.setProperty("driver","com.mysql.jdbc.Driver")

    spark.sql("use ods03")

    //  user_info
    spark.sql("drop table if exists user_info")
    spark.sql(
      """
        |create table if not exists ods03.user_info(
        |id int,
        |login_name string,
        |nick_name string,
        |passwd string,
        |name string,
        |phone_num string,
        |email string,
        |head_img string,
        |user_level string,
        |birthday timestamp,
        |gender string,
        |create_time timestamp,
        |operate_time timestamp
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)

    //  sku_info
    spark.sql("drop table if exists sku_info")
    spark.sql(
      """
        |create table if not exists ods03.sku_info(
        |id int,
        |spu_id int,
        |price decimal(10,0),
        |sku_name string,
        |sku_desc string,
        |weight decimal(10,2),
        |tm_id int,
        |category3_id int,
        |sku_default_img string,
        |create_time timestamp
        |)
        |partitioned by(etl_date String)
        |""".stripMargin)


    //  base_province
    spark.sql("drop table if  exists base_province")
    spark.sql(
      """
        |create table if not exists ods03.base_province(
        |id int,
        |name string,
        |region_id string,
        |area_code string,
        |iso_code string,
        |create_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  base_region
    spark.sql("drop table if exists base_region")
    spark.sql(
      """
        |create table if not exists ods03.base_region(
        |id string,
        |region_name string,
        |create_time timestamp
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  order_info
    spark.sql("drop table if exists order_info")
    spark.sql(
      """
        |create table if not exists ods03.order_info(
        |id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2)
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  order_detail
    spark.sql("drop table if exists order_detail")
    spark.sql(
      """
        |create table if not exists ods03.order_detail(
        |id int,
        |order_id int,
        |sku_id int,
        |sku_name string,
        |img_url string,
        |order_price decimal(10,2),
        |sku_num string,
        |create_time timestamp,
        |source_type string,
        |source_id int
        |)
        |partitioned by(etl_date string)
        |""".stripMargin)

    //  Sanity check: list the tables that were just created.
    spark.sql("show tables").show

    /**
     * Full-loads one MySQL table into the matching ODS table.
     *
     * Uses `insert overwrite table` (not `insert into`) so a re-run replaces
     * the etl_date partition instead of appending duplicate rows — consistent
     * with the base_region load below.
     *
     * @param mysql_table name of the source table in MySQL
     * @param ods_table   name of the destination table in the ods03 database
     */
    def write_hive(mysql_table:String,ods_table:String):Unit={
      spark.read.jdbc(jdbcUrl,mysql_table,mysql_connect)
        .createOrReplaceTempView("temp")
      spark.sql(
        s"""
          |insert overwrite table ods03.${ods_table}
          |partition(etl_date="${etlDate}")
          |select * from temp
          |""".stripMargin)
    }

    //  Load the tables whose source schema matches the ODS schema exactly.
    write_hive("user_info","user_info")
    write_hive("sku_info","sku_info")
    write_hive("order_info","order_info")
    write_hive("order_detail","order_detail")

    //  base_province and base_region need an extra create_time column that the
    //  source tables do not have (the ODS DDL above includes it), so they are
    //  loaded separately with the column appended — otherwise the column count
    //  would not match and the insert would fail.
    //  One shared load timestamp keeps both tables consistent.
    val loadTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())

    spark.read.jdbc(jdbcUrl,"base_province",mysql_connect)
      .withColumn(
        "create_time",
        lit(loadTime).cast("timestamp")
      )
      .createOrReplaceTempView("base_province_temp")

    spark.sql(
      s"""
        |insert overwrite table ods03.base_province
        |partition(etl_date="${etlDate}")
        |select * from base_province_temp
        |""".stripMargin)

    spark.read.jdbc(jdbcUrl,"base_region",mysql_connect)
      .withColumn(
        "create_time",
        lit(loadTime).cast("timestamp")
      )
      .createOrReplaceTempView("base_region_temp")

    spark.sql(
      s"""
        |insert overwrite table ods03.base_region
        |partition(etl_date="${etlDate}")
        |select * from base_region_temp
        |""".stripMargin)

    //  Tear down the Spark session.
    spark.stop()
  }

}
