package com.o2o.cleaning.month.platform.ebusiness_plat.taobao

import com.o2o.utils.Iargs
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}

/**
  * @author o2o-rd-0008
  * @since  2021/2/2 14:23
  * @note   Merges last month's Tmall address dimension table with this month's
  *         newly resolved address data and writes the union back to S3/OBS.
  */
object UnionTmallAddress {

  /** Predicate keeping only rows whose region was successfully resolved:
    * regional_ID / province / city must not carry the sentinel values '0' or '-1'.
    * (Previously duplicated verbatim at two call sites.) */
  private val ValidRegionFilter: String =
    "regional_ID != '0' and regional_ID != '-1' and province != '-1' and province != '0' and city != '0' and city != '-1'"

  /** Shared column list used to align both inputs before the union.
    * Dataset.union is positional, so both sides must project the exact same
    * columns in the exact same order. (Previously duplicated verbatim.) */
  private val OutputColumns: Seq[String] = Seq(
    "address",
    "administrative_region",
    "aedzId",
    "city",
    "city_grade",
    "city_origin",
    "district",
    "district_origin",
    "economic_division",
    "if_city",
    "if_district",
    "if_state_level_new_areas",
    "latitude",
    "longitude",
    "name",
    "poor_counties",
    "province",
    "regional_ID",
    "registration_institution",
    "rural_demonstration_counties",
    "rural_ecommerce",
    "shopId",
    "the_belt_and_road_city",
    "the_belt_and_road_province",
    "the_yangtze_river_economic_zone_city",
    "the_yangtze_river_economic_zone_province",
    "town",
    "urban_agglomerations")

  /**
    * Entry point. Steps:
    *  1. Left-join the old (2020/12) address table with the full set of newly
    *     resolved rows on shopId; where the new row disagrees on a field, the
    *     new value wins (field refresh via CASE WHEN).
    *  2. Read the rows that are brand new this month (same validity filter).
    *  3. Project both sides onto OutputColumns and union them (new rows first,
    *     matching the original `frame1.union(frame)` order).
    *  4. Overwrite the 2021/1 output location with the result.
    *
    * @param args unused; all paths are hard-coded to the 2021/1 run.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      // NOTE(review): hard-coded local master prevents cluster deployment via
      // spark-submit --master; consider removing for production runs.
      .master("local[*]")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Input/output locations for the 2021/1 run.
    val taobaoNewpath = "s3a://o2o-dataproces-group/zsc/tmall/address_new_result/2021/"
    val taobaoOldpath = "s3a://o2o-dimension-table/address_table/address_table_2020/12/address_platform/tmall_address_2020_12/"
    val taobaoNewAllPath = "s3a://o2o-dataproces-group/zsc/tmall/address_new_result/2021_all/"
    val outputPath = "s3a://o2o-dimension-table/address_table/address_table_2021/1/address_platform/tmall_address_2021_1/"

    // 1. Join the old table against the full new set and refresh changed fields.
    //    createOrReplaceTempView replaces the deprecated registerTempTable.
    spark.read.json(taobaoOldpath).createOrReplaceTempView("oldtab")
    spark.read.json(taobaoNewAllPath)
      .where(ValidRegionFilter)
      .createOrReplaceTempView("alltab")

    // For each refreshable field, take t2's value when it differs from t1's;
    // rows with no match in alltab keep their old values (left join => t2.* is
    // null, the CASE picks t2 on inequality — null-safe because null != x is
    // itself null/false in Spark SQL three-valued logic, falling to t1).
    val refreshed: DataFrame = spark.sql(
      """
        |select
        |t1.*,
        |case when t1.province!=t2.province then t2.province else t1.province end province_tmp,
        |case when t1.city!=t2.city then t2.city else t1.city end city_tmp,
        |case when t1.district!=t2.district then t2.district else t1.district end district_tmp,
        |case when t1.address!=t2.address then t2.address else t1.address end address_tmp,
        |case when t1.name!=t2.name then t2.name else t1.name end name_tmp,
        |case when t1.regional_ID!=t2.regional_ID then t2.regional_ID else t1.regional_ID end regional_ID_tmp,
        |case when t1.registration_institution!=t2.registration_institution then t2.registration_institution else t1.registration_institution end registration_institution_tmp,
        |case when t1.economic_division!=t2.economic_division then t2.economic_division else t1.economic_division end economic_division_tmp
        |from
        |oldtab t1
        |left join
        |alltab t2
        |on t1.shopId=t2.shopId
        |--where t2.shopId is not null
      """.stripMargin)
      // Drop the stale originals, then promote the *_tmp columns to their names.
      .drop("province", "city", "district", "address", "name", "regional_ID", "registration_institution", "economic_division")
      .withColumnRenamed("province_tmp", "province")
      .withColumnRenamed("city_tmp", "city")
      .withColumnRenamed("district_tmp", "district")
      .withColumnRenamed("address_tmp", "address")
      .withColumnRenamed("name_tmp", "name")
      .withColumnRenamed("regional_ID_tmp", "regional_ID")
      .withColumnRenamed("registration_institution_tmp", "registration_institution")
      .withColumnRenamed("economic_division_tmp", "economic_division")

    // 2. Brand-new rows for this month, same region-validity filter.
    //    (The original routed this through a "newtab" temp view and a
    //    `select *` — a no-op round-trip; read directly instead.)
    val newRows: DataFrame = spark.read.json(taobaoNewpath).where(ValidRegionFilter)

    // 3. Align both sides on the shared column order, then union (new first,
    //    preserving the original frame1.union(frame) ordering).
    val merged: Dataset[Row] = newRows.selectExpr(OutputColumns: _*)
      .union(refreshed.selectExpr(OutputColumns: _*))

    // 4. Overwrite this month's output partition.
    merged.write.mode("overwrite").json(outputPath)

    spark.stop()
  }
}
