package DianShang_2024.ds_server.extract

import org.apache.spark.sql.SparkSession

import java.util.Properties

object table_data_preparation {
  /**
   * Spark job: extract the MySQL ds_db01 tables customer_inf, order_detail,
   * order_master and product_info into the corresponding Hive ODS tables
   * (created here if absent), writing each load into a single etl_date partition.
   *
   * NOTE(review): the original description said "incremental" extraction, but
   * this job performs a full load of every source table into one partition; a
   * true incremental load would filter on modified_time — confirm the intent.
   *
   * @param args optional; args(0) overrides the etl_date partition value
   *             (defaults to "20240311", the original hard-coded date).
   */
  def main(args: Array[String]): Unit = {
    // Partition value for this load; overridable via the first program argument
    // so the job no longer has to be recompiled for each run date.
    val etlDate = if (args.nonEmpty) args(0) else "20240311"

    // Build the SparkSession with Hive support enabled.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("数据抽取准备表格")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("create database if not exists ods_server")
    spark.sql("show databases").show
    spark.sql("use ods_server")

    // JDBC connection properties for the MySQL source.
    val jdbc_conf = new Properties()
    jdbc_conf.setProperty("user", "root")
    jdbc_conf.setProperty("password", "123456")
    jdbc_conf.setProperty("driver", "com.mysql.jdbc.Driver")

    // (Re)create the target Hive tables so each run starts from a clean slate.
    //
    // BUG FIX: the original dropped `customer_inf`, `order_master` and
    // `product_info`, but the tables actually created and written to are the
    // `*01`-suffixed ones — the drops never affected them, and `order_detail01`
    // had no drop at all. Combined with `insert into`, a re-run would append
    // duplicate rows into the same partition. Every drop now targets the table
    // that is really created below.
    def create_table(): Unit = {
      //  customer_inf
      spark.sql("drop table if exists ods_server.customer_inf01")
      spark.sql(
        """
          |create table if not exists ods_server.customer_inf01(
          |customer_inf_id int,
          |customer_id int,
          |customer_name string,
          |identity_card_type int,
          |identity_card_no string,
          |mobile_phone string,
          |customer_email string,
          |gender string,
          |customer_point int,
          |register_time timestamp,
          |birthday timestamp,
          |customer_level int,
          |customer_money decimal(8,2),
          |modified_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  order_detail (drop was missing entirely in the original)
      spark.sql("drop table if exists ods_server.order_detail01")
      spark.sql(
        """
          |create table if not exists ods_server.order_detail01(
          |order_detail_id int,
          |order_sn string,
          |product_id int,
          |product_name string,
          |product_cnt int,
          |product_price decimal(8,2),
          |average_cost decimal(8,2),
          |weight double,
          |fee_money decimal(8,2),
          |w_id int,
          |create_time string,
          |modified_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  order_master
      spark.sql("drop table if exists ods_server.order_master01")
      spark.sql(
        """
          |create table if not exists ods_server.order_master01(
          |order_id int,
          |order_sn string,
          |customer_id int,
          |shipping_user string,
          |province string,
          |city string,
          |address string,
          |order_source int,
          |payment_method int,
          |order_money decimal(8,2),
          |district_money decimal(8,2),
          |shipping_money decimal(8,2),
          |payment_money decimal(8,2),
          |shipping_comp_name string,
          |shipping_sn string,
          |create_time string,
          |shipping_time string,
          |pay_time string,
          |receive_time string,
          |order_status string,
          |order_point int,
          |invoice_title string,
          |modified_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)

      //  product_info
      spark.sql("drop table if exists ods_server.product_info01")
      spark.sql(
        """
          |create table if not exists ods_server.product_info01(
          |product_id int,
          |product_core string,
          |product_name string,
          |bar_code string,
          |brand_id int,
          |one_category_id int,
          |two_category_id int,
          |three_category_id int,
          |supplier_id int,
          |price decimal(8,2),
          |average_cost decimal(18,2),
          |publish_status int,
          |audit_status int,
          |weight double,
          |length double,
          |height double,
          |width double,
          |color_type string,
          |production_date timestamp,
          |shelf_life int,
          |descript string,
          |indate timestamp,
          |modified_time timestamp
          |)
          |partitioned by(etl_date string)
          |""".stripMargin)
    }

    //  Create the Hive tables.
    create_table()

    //  Register a MySQL table as a Spark temp view.
    def read_mysql(table_name: String, view_name: String): Unit = {
      spark.read
        .jdbc("jdbc:mysql://192.168.40.110/ds_db01?useSSL=false", table_name, jdbc_conf)
        .createOrReplaceTempView(view_name)
    }

    //  Insert every row of a temp view into one etl_date partition of a Hive table.
    def write_hive(table_name: String, view_name: String): Unit = {
      spark.sql(
        s"""
          |insert into table ods_server.$table_name
          |partition(etl_date='$etlDate')
          |select
          |*
          |from
          |$view_name
          |""".stripMargin)
    }

    //  Source MySQL table -> target Hive table pairs; the temp view name is
    //  derived from the source name, matching the original `<src>_temp` views.
    val tables = Seq(
      ("customer_inf", "customer_inf01"),
      ("order_detail", "order_detail01"),
      ("order_master", "order_master01"),
      ("product_info", "product_info01"))

    tables.foreach { case (src, dst) =>
      val view = s"${src}_temp"
      read_mysql(src, view)
      write_hive(dst, view)
    }

    //  Tear down the SparkSession.
    spark.close()
  }

}
