package cn.doitedu.dw_etl.dataload

import java.util.Properties

import org.apache.spark.sql.SparkSession

/**
 * Batch-loads dimension tables from the `realtimedw` MySQL database into
 * the Hive `dim` database via Spark JDBC. Intended to be re-runnable:
 * each target table is fully overwritten on every run.
 *
 * JDBC credentials (user/password) are read from the classpath resource
 * `prod/db.properties`.
 */
object DimTableLoader {

  /** JDBC connection URL for the source MySQL database. */
  private val JdbcUrl =
    "jdbc:mysql://localhost:3306/realtimedw?useUnicode=true&characterEncoding=utf8"

  /** Source MySQL table -> target Hive table (in the `dim` database). */
  private val TablesToLoad = Seq(
    "pms_product"          -> "dim.pms_product",          // product info
    "pms_product_category" -> "dim.pms_product_category", // product category info
    "pms_brand"            -> "dim.pms_brand",            // product brand info
    "dim_pginfo"           -> "dim.dim_pginfo",           // page info
    "dim_lanmu"            -> "dim.dim_lanmu"             // section (lanmu) info
  )

  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder()
      .enableHiveSupport()
      .appName("维表加载") // app name kept as-is ("dimension table load")
      .master("local")
      .getOrCreate()

    // Ensure the session is released even if one of the loads fails.
    try {
      val props = loadDbProps()

      TablesToLoad.foreach { case (srcTable, targetTable) =>
        // Read the full source table over JDBC and (re)load it into Hive.
        // mode("overwrite") makes the job idempotent: without it,
        // saveAsTable throws TableAlreadyExistsException on the second run.
        spark.read
          .jdbc(JdbcUrl, srcTable, props)
          .write
          .mode("overwrite")
          .saveAsTable(targetTable)
      }
    } finally {
      spark.close()
    }
  }

  /**
   * Loads JDBC connection properties from `prod/db.properties` on the
   * classpath, failing fast with a clear message if the resource is
   * missing and always closing the underlying stream.
   */
  private def loadDbProps(): Properties = {
    val props = new Properties()
    val in = getClass.getClassLoader.getResourceAsStream("prod/db.properties")
    // getResourceAsStream returns null (not an exception) when absent;
    // surface that as an explicit error instead of an NPE in load().
    require(in != null, "classpath resource prod/db.properties not found")
    try props.load(in)
    finally in.close()
    props
  }

}
