package DianShang_2024.ds_07.extract

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit

import java.text.SimpleDateFormat
import java.util.{Calendar, Properties}

object table_data_perparation {

  /** Recreates the ods07 staging tables in Hive and reloads each one from the
    * shtd_store MySQL database, tagging every row with today's date (yyyyMMdd)
    * as the etl_date partition value.
    *
    * NOTE(review): the object name keeps the original (misspelled) identifier
    * "table_data_perparation" so any external references remain valid.
    */
  def main(args: Array[String]): Unit = {
    // Nonstrict dynamic-partition mode is required because the inserts below
    // supply etl_date as a regular column instead of a static partition spec.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("表格准备")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
//      .config("spark.serializer","org.apache.spark.serializer.KryoSerializer")
//      .config("spark.sql.extensions","org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("use ods07")

    /** Drops `name` if it exists, then recreates it with the given column
      * definitions plus an `etl_date string` partition column.
      *
      * @param name    Hive table name
      * @param columns column definition lines ("name type," per line), without
      *                the surrounding parentheses or the partition column
      */
    def recreate(name: String, columns: String): Unit = {
      spark.sql(s"drop table if exists $name")
      spark.sql(
        s"""
           |create table if not exists $name(
           |$columns
           |)
           |partitioned by(etl_date string)
           |""".stripMargin)
    }

    recreate("user_info",
      """id int,
        |login_name string,
        |nick_name string,
        |passwd string,
        |name string,
        |phone_num string,
        |email string,
        |head_img string,
        |user_level string,
        |birthday timestamp,
        |gender string,
        |create_time timestamp,
        |operate_time timestamp""".stripMargin)

    recreate("sku_info",
      """id int,
        |spu_id int,
        |price decimal(10,0),
        |sku_name string,
        |sku_desc string,
        |weight decimal(10,2),
        |tm_id int,
        |category3_id int,
        |sku_default_img string,
        |create_time timestamp""".stripMargin)

    recreate("base_province",
      """id int,
        |name string,
        |region_id string,
        |area_code string,
        |iso_code string,
        |create_time timestamp""".stripMargin)

    recreate("base_region",
      """id string,
        |region_name string,
        |create_time timestamp""".stripMargin)

    recreate("order_info",
      """id int,
        |consignee string,
        |consignee_tel string,
        |final_total_amount decimal(16,2),
        |order_status string,
        |user_id int,
        |delivery_address string,
        |order_comment string,
        |out_trade_no string,
        |trade_body string,
        |create_time timestamp,
        |operate_time timestamp,
        |expire_time timestamp,
        |tracking_no string,
        |parent_order_id int,
        |img_url string,
        |province_id int,
        |benefit_reduce_amount decimal(16,2),
        |original_total_amount decimal(16,2),
        |feight_fee decimal(16,2)""".stripMargin)

    recreate("order_detail",
      """id int,
        |order_id int,
        |sku_id int,
        |sku_name string,
        |img_url string,
        |order_price decimal(10,2),
        |sku_num string,
        |create_time timestamp,
        |source_type string,
        |source_id int""".stripMargin)

    spark.sql("show tables").show

    // MySQL connection settings for the source database.
    // NOTE(review): credentials, host, and driver class are hard-coded; move
    // them to external configuration for anything beyond this exercise setup.
    val connect = new Properties()
    connect.setProperty("user", "root")
    connect.setProperty("password", "123456")
    connect.setProperty("driver", "com.mysql.jdbc.Driver")

    val jdbcUrl = "jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false"

    // Capture the wall clock once so every table loaded in this run shares
    // the same partition value and load timestamp.
    val now = Calendar.getInstance().getTime
    val today = new SimpleDateFormat("yyyyMMdd").format(now)
    val currentTime = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(now)

    /** Copies one MySQL table into its Hive counterpart, adding etl_date.
      *
      * @param tableName       identical source and target table name
      * @param stampCreateTime when true, overwrite create_time with the load
      *                        timestamp (base_province / base_region require
      *                        this; the fact tables keep their source values)
      */
    def toHive(tableName: String, stampCreateTime: Boolean = false): Unit = {
      val source = spark.read.jdbc(jdbcUrl, tableName, connect)
      val stamped =
        if (stampCreateTime) source.withColumn("create_time", lit(currentTime).cast("timestamp"))
        else source
      stamped
        .withColumn("etl_date", lit(today))
        .createOrReplaceTempView("result")

      spark.sql(s"insert into table $tableName select * from result")
    }

    toHive("user_info")
    toHive("sku_info")
    toHive("base_province", stampCreateTime = true)
    toHive("base_region", stampCreateTime = true)
    toHive("order_info")
    toHive("order_detail")

    // Sanity check: confirm the load actually landed rows.
    spark.sql("select * from order_detail limit 5").show

    spark.close()
  }

}
