package com.o2o.cleaning.month.platform.ebusiness_plat.taobao

import com.o2o.utils.Iargs
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @author o2o-rd-0008
  * @since  2021/2/1 15:08
  *
  * Loads exported Taobao shop-address data and the standardized shop/company
  * address dataset, registering both as temp views for reconciliation queries.
  */
object TaobaoAddress {
  /**
    * Entry point. Reads the previously exported Taobao address dataset (ORC on OBS/S3A)
    * and the standardized shop/company address dataset (JSON), then registers them as
    * the temp views "t1" and "t2".
    *
    * NOTE(review): every downstream query is currently commented out, so this job only
    * performs the two reads and the view registrations. The commented blocks are kept
    * as a record of the earlier Mongo export and reconciliation SQL.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      // No interpolation needed for a plain expression.
      .appName(this.getClass.getSimpleName)
//        .master("local[*]")
      //      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3A-compatible) credentials are taken from shared config, never hard-coded.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    try {
      /*val readUri = "mongodb://root:O2Odata123!@ 192.168.0.149:27017/admin"

      val readDatabase = "Address"
      val readCollection = "taobao_address_all"

      try {
        val value: MongoRDD[Document] = MongoSpark.load(sc,ReadConfig(Map("uri" -> readUri, "database" -> readDatabase, "collection" -> readCollection)))


        val rdd: RDD[String] = value.map(line => {
          val nObject: JSONObject = JSON.parseObject(line.toJson())
          nObject.remove("_id")
          nObject.toString
        })

        val frame: DataFrame = spark.read.json(rdd).cache()
        frame.write.orc("s3a://o2o-dataproces-group/zsc/taobao/address/2021/")
      }catch {
        case e:Exception=>{
          println(e)
        }
      }*/

      // Exported Taobao addresses; rename to align with the standardized dataset's schema.
      val frame: DataFrame = spark.read.orc("s3a://o2o-dataproces-group/zsc/taobao/address/2021/").withColumnRenamed("company_name","name")

      // Standardized shop/company address reference data.
      val frame1: DataFrame = spark.read.json("s3a://o2o-dataproces-group/zsc/standardData/shopAddress_company/shop_20200513/")

      // createOrReplaceTempView replaces registerTempTable, deprecated since Spark 2.0.
      frame.createOrReplaceTempView("t1")
      frame1.createOrReplaceTempView("t2")

      /*val regional_ID = spark.sql(
        """
          |select
          |shopId,regional_ID,region,province,city,address
          |from
          |(select
          |t1.shopId,
          |case when t1.regional_ID!=t2.regional_ID and t1.regional_ID != '-1' and t1.regional_ID != '0' and t1.regional_ID is not null then t2.regional_ID else 0 end regional_ID,
          |case when t1.regional_ID!=t2.regional_ID and t1.regional_ID != '-1' and t1.regional_ID != '0' and t1.regional_ID is not null then t1.regional_ID else 0 end region
          |,t1.province,t1.city,t1.address
          |from
          |t2
          |left join
          |t1
          |on t1.shopId=t2.shopId
          |where t1.shopId is not null
          |) a
          |group by shopId,a.regional_ID,region,province,city,address
        """.stripMargin)

      regional_ID.createOrReplaceTempView("reg")

      spark.read.orc("s3a://dws-data/g_data/2020/12/taobao/").createOrReplaceTempView("tab")

      spark.sql(
        """
          |select
          |shopId,shopName,sum(salesAmount) salesAmount,sum(sellCount) sellCount,province,city,district,address,regional_ID_new,region,pro,cit,dist
          |from
          |(
          |select
          |t1.*,
          |t2.regional_ID as regional_ID_new,
          |t2.region,
          |t2.province as pro,
          |t2.city as cit,
          |t2.address as dist
          |from
          |tab t1
          |left join
          |reg t2
          |on t1.shopId=t2.shopId
          |where t2.shopId is not null and t2.regional_ID is not null and t2.regional_ID != 0 and t2.region != 0
          |) a
          |group by shopId,shopName,province,city,district,address,regional_ID_new,region,pro,cit,dist
          |order by salesAmount desc
          |limit 1000
        """.stripMargin).write.option("header",true).csv("s3a://o2o-dataproces-group/zsc/taobao/address_region_id/202101/")*/

      /*spark.sql(
        """
          |select
          |count(1) allct,
          |sum(case when t1.name!=t2.name and t1.name != '-1' and t1.name != '0' and t1.name is not null then 1 else 0 end) namect,
          |sum(case when t1.address!=t2.address and t1.address != '-1' and t1.address != '0' and t1.address is not null then 1 else 0 end) addct,
          |sum(case when t1.city!= t2.city and t1.city != '-1' and t1.city != '0' and t1.city is not null then 1 else 0 end) cityct,
          |sum(case when t1.latitude!=t2.latitude and t1.latitude != '-1' and t1.latitude != '0' and t1.latitude is not null then 1 else 0 end) latitudect,
          |sum(case when t1.longitude!=t2.longitude and t1.longitude != '-1' and t1.longitude != '0' and t1.longitude is not null then 1 else 0 end) longitudect,
          |sum(case when t1.province!=t2.province and t1.province != '-1' and t1.province != '0' and t1.province is not null then 1 else 0 end) provincect,
          |sum(case when t1.regional_ID!=t2.regional_ID and t1.regional_ID != '-1' and t1.regional_ID != '0' and t1.regional_ID is not null then 1 else 0 end) regional_IDct,
          |sum(case when t1.registration_institution!=t2.registration_institution and t1.registration_institution != '-1' and t1.registration_institution != '0' and t1.registration_institution is not null then 1 else 0 end) registration_institutionct
          |from
          |t2
          |left join
          |t1
          |on t1.shopId=t2.shopId
          |where t1.shopId is not null
        """.stripMargin).show(false)
      */

      /*val newres = spark.sql(
        """
          |select
          |t1.*
          |from
          |t1
          |left join
          |t2
          |on t1.shopId=t2.shopId
          |where t2.shopId is null
        """.stripMargin)

      newres.write.json("s3a://o2o-dataproces-group/zsc/taobao/address_new/2021/")*/

      /*spark.sql(
        """
          |select
          |*
          |from
          |t2
          |
          |
          |
        """.stripMargin)*/
    } finally {
      // Release the session and its cluster resources even if a read fails.
      spark.stop()
    }
  }
}
