package DianShang_2024.ds_01.extract

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit
import shapeless.syntax.typeable.typeableOps

import java.text.SimpleDateFormat
import java.time.LocalDate
import java.util.{Date, Properties}

object data_preparation {
  def main(args: Array[String]): Unit = {
    /*
        编写Scala代码，使用Spark将MySQL的shtd_store库中表user_info、sku_info、base_province、base_region、order_info、
        order_detail的数据增量抽取到Hive的ods库中对应表user_info、sku_info、base_province、base_region、order_info、
        order_detail中。
     */

    //  准备sparksql环境
    val spark=SparkSession.builder()
      .master("local[*]")
      .appName("准备表格和数据")
      .enableHiveSupport()
      .getOrCreate()

    //  创建ods数据库并且使用
    spark.sql(" create database if not exists ods")
    spark.sql("show databases").show
    spark.sql("use ods ")

    //  准备连接mysql的配置
    val jdbc_conf:Properties=new Properties()
    jdbc_conf.setProperty("user","root")
    jdbc_conf.setProperty("password","123456")
    jdbc_conf.setProperty("driver","com.mysql.jdbc.Driver")

    //  定义创建表格的函数
    def create_table():Unit={
      // user_info
      spark.sql(
        """
          |create table if not exists ods.user_info2(
          |id int,
          |login_name string,
          |nick_name string,
          |passwd string,
          |name string,
          |phone_num string,
          |email string,
          |head_img string,
          |user_level string,
          |birthday timestamp,
          |gender string,
          |create_time timestamp,
          |operate_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  sku_info
      spark.sql(
        """
          |create table if not exists ods.sku_info2(
          |id int,
          |spu_id int,
          |price decimal(10,0),
          |sku_name string,
          |sku_desc string,
          |weight decimal(10,2),
          |tm_id int,
          |category3_id int,
          |sku_default_img string,
          |create_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  base_province
      spark.sql(
        """
          |create table if not exists ods.base_province2(
          |id int,
          |name string,
          |region_id string,
          |area_code string,
          |iso_code string,
          |create_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  base_region
      spark.sql(
        """
          |create table if not exists ods.base_region2(
          |id string,
          |region_name string,
          |create_time timestamp
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  order_info
      spark.sql(
        """
          |create table if not exists ods.order_info2(
          |id int,
          |consignee string,
          |consignee_tel string,
          |final_total_amount decimal(16,2),
          |order_status string,
          |user_id int,
          |delivery_address string,
          |order_comment string,
          |out_trade_no string,
          |trade_body string,
          |create_time timestamp,
          |operate_time timestamp,
          |expire_time timestamp,
          |tracking_no string,
          |parent_order_id int,
          |img_url string,
          |province_id int,
          |benefit_reduce_amount decimal(16,2),
          |original_total_amount decimal(16,2),
          |feight_fee decimal(16,2)
          |)partitioned by(etl_date string)
          |""".stripMargin)

      //  order_detail
      spark.sql(
        """
          |create table if not exists ods.order_detail2(
          |id int,
          |order_id int,
          |sku_id int,
          |sku_name string,
          |img_url string,
          |order_price decimal(10,2),
          |sku_num string,
          |create_time timestamp,
          |source_type string,
          |source_id int
          |)partitioned by(etl_date string)
          |""".stripMargin)
    }

    //  创建表格
    create_table()

    //  创建读取读取mysql数据的方法
    def read_mysql(table_name:String,view_name:String):Unit={
      spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false",s"$table_name",jdbc_conf)
        .createOrReplaceTempView(s"$view_name")
    }

    //  创建将数据写入到hive的方法  plusDays(-1):进行减少一天的操作
    def write_hive(table_name:String,view_name:String):Unit={
      //  拿到前一天的日期
      val before_time:String=LocalDate.now().plusDays(-1).toString.replace("-","")
      spark.sql(
        s"""
          |insert into ods.$table_name
          |partition(etl_date='20231017')
          |select * from $view_name
          |""".stripMargin)
    }

    /*
          将不需要添加新字段的数据先上传到hive
     */
    read_mysql("user_info","user_info_temp")
    write_hive("user_info2","user_info_temp")
    read_mysql("sku_info","sku_info_temp")
    write_hive("sku_info2","sku_info_temp")
    read_mysql("order_info","order_info_temp")
    write_hive("order_info2","order_info_temp")
    read_mysql("order_detail","order_detail_temp")
    write_hive("order_detail2","order_detail_temp")

    /*
        将需要新添加字段的数据导入到hive
        下面的操作是读取数据并且添加一个字段，字段名为create_time,lit函数是生成一列的数据，给这一列数据赋值(一整列的数据都是相同的)
        然后将字符串类型的时间转化为timestamp类型，就可以传送到hive里面去
     */
    spark.read.jdbc("jdbc:mysql://192.168.40.110:3306/shtd_store?useSSL=false","base_province",jdbc_conf)
      .withColumn("create_time",lit(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())).cast("timestamp"))
      .createOrReplaceTempView("base_province_temp")

    spark.read.jdbc("jdbc:mysql://192.168.40.110/shtd_store?useSSL=false","base_region",jdbc_conf)
      .withColumn("create_time",lit(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())).cast("timestamp"))
      .createOrReplaceTempView("base_region_temp")

    spark.sql(
      """
        |insert into ods.base_province2
        |partition(etl_date='20231017')
        |select * from base_province_temp
        |""".stripMargin)

    spark.sql(
      """
        |insert into ods.base_region2
        |partition(etl_date='20231017')
        |select * from base_region_temp
        |""".stripMargin)


    //  关闭sparksql环境
    spark.close()
  }

}
