package addrdimen

import org.apache.spark.SparkContext
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/9/22 16:32
  * @ Param:  ${PARAM}
  * @ Description: 
  */
/**
  * Builds the combined Tmall + JD address and company dimension tables for the
  * 2020-08 load: reads each platform's raw data, stamps a platform id and an
  * SCD-style validity window onto every row, unions the two platforms, and
  * writes the result as ORC to object storage.
  */
object TmallJdAddressDimension {

  // Validity window stamped onto every record of this monthly load.
  private val StartDate = "2020-08-30 00:00:00"
  private val EndDate = "9999-12-31 00:00:00"

  /**
    * Adds the three dimension columns shared by every frame in this job:
    * `platformid` plus the `startdate`/`enddate` validity window.
    *
    * @param df         source DataFrame (address or company rows)
    * @param platformId platform code ("10" = Tmall, "5" = JD)
    */
  private def withDimensionColumns(df: DataFrame, platformId: String): DataFrame = {
    import org.apache.spark.sql.functions.lit
    df.withColumn("platformid", lit(platformId))
      .withColumn("startdate", lit(StartDate))
      .withColumn("enddate", lit(EndDate))
  }

  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession.builder()
      .appName(this.getClass.getName)
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .master("local[*]")
      .getOrCreate()

    // Never reassigned, so `val` (was `var`).
    val sc: SparkContext = spark.sparkContext

    // SECURITY: access/secret keys are hardcoded in source and therefore leaked
    // to version control. Move them to environment variables or a secrets store
    // (e.g. fs.s3a.aws.credentials.provider) and ROTATE these keys.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    sc.setLogLevel("ERROR")

    // Source paths for the 2020-08 partition of each platform's raw data.
    val tmallAddrPath = "s3a://o2o-dimension-table/address_table/address_table_2020/8/address_platform/tmall_address_2020_8/"

    val jdAddrPath = "s3a://o2o-dimension-table/address_table/address_table_2020/8/address_platform/jd_address_2020_8/"

    val tmallCompanyPath = "s3a://o2o-dimension-table/company_table/company_source_data_all/2020/8/tmall/"

    val jdCompanyPath = "s3a://o2o-dimension-table/company_table/company_source_data_all/2020/8/jd/"

    // Shop paths are currently only referenced by the schema-debugging lines below.
    val tmallShopPath = "s3a://dws-data/g_shop/2020/8/tmall/"

    val jdShopPath = "s3a://dws-data/g_shop/2020/8/jd/"

//    spark.read.orc(tmallShopPath).printSchema()
//
//    spark.read.orc(jdShopPath).printSchema()

    // Address dimension: raw JSON per platform, stamped with platform + window.
    val tmallAddrDF: DataFrame = withDimensionColumns(spark.read.json(tmallAddrPath), "10")
    val jdAddrDF: DataFrame = withDimensionColumns(spark.read.json(jdAddrPath), "5")

    // unionByName resolves columns by name; the positional union() used before
    // would silently misalign columns if the two independently inferred schemas
    // ever differ in field order.
    val allPlatAddrDF: Dataset[Row] = jdAddrDF.unionByName(tmallAddrDF)

    // NOTE: default save mode is ErrorIfExists — the job fails if the target
    // path already holds data from a previous run.
    allPlatAddrDF.write.orc("s3a://o2o-dataproces-group/zsc/address_table/")

    // Company dimension: same pattern, sourced from ORC.
    val tmallCompanyDF: DataFrame = withDimensionColumns(spark.read.orc(tmallCompanyPath), "10")
    val jdCompanyDF: DataFrame = withDimensionColumns(spark.read.orc(jdCompanyPath), "5")

    val allPlatCompanyDF: Dataset[Row] = jdCompanyDF.unionByName(tmallCompanyDF)

    allPlatCompanyDF.write.orc("s3a://o2o-dataproces-group/zsc/company_table/")

    // Release the session's resources before the JVM exits.
    spark.stop()
  }

}
