package com.o2o.cleaning.month.platform.ebusiness_plat.suning

import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/6/5 16:23
  * @ Param:  ${PARAM}
  * @ Description: One-off Spark job that copies Taobao "caijibuxuqiu" JSON data
  *                between S3 (OBS) prefixes, coalesced into a single output file.
  */
object BrandDataJoin {

  /**
    * Entry point. Reads the "caijibuxuqiu" Taobao JSON dataset from S3 and
    * rewrites it to a new prefix as a single part file.
    *
    * @param args unused — all paths and credentials are configured below.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      // SECURITY(review): Elasticsearch credentials are hard-coded in source.
      // Move them to a config file / environment variables before reuse.
      .config("es.net.http.auth.pass", "changeme")
//      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3-compatible) credentials come from the shared Iargs holder
    // defined elsewhere in the project.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Fix: renamed misspelled local `apth` -> `srcPath`; destination path
    // extracted to a named val instead of being inlined in the write call.
    val srcPath = "s3a://o2o-dataproces-group/zsc/product/taobao/2020/9/caijibuxuqiu/"
    val dstPath = "s3a://o2o-dataproces-group/zsc/product/taobao/2020/9/caijibuxuqiunew/"

    // repartition(1) forces a single output file — acceptable only because the
    // dataset is small; it funnels all data through one executor.
    spark.read.json(srcPath).repartition(1).write.json(dstPath)


//    val apth = "s3a://o2o-dataproces-group/zsc/product/taobao/2020/9/handldAddressData/"
//
//    spark.read.orc(apth).selectExpr("shopId","delivery_from","userId").distinct()
//      .write.json("s3a://o2o-dataproces-group/zsc/product/taobao/2020/9/caijibuxuqiu/")

/*    val newBrand: DataFrame = spark.read.option("header",true).option("delimiter",",").csv("D:/zsc/品牌/各行业TOP30品牌10021-3/part-00000-cc31156a-6a3b-49a9-8ad8-a737a3df55e8-c000.csv")
//    val newBrand: DataFrame = spark.read.option("header",true).option("delimiter",",").csv("D:/zsc/品牌/各行业TOP30品牌.csv")

    newBrand.registerTempTable("tab")

    val frame: DataFrame = spark.sql(
      """
        |select
        |brandName_cn,
        |firstCategoryId,
        |brand_state
        |from
        |tab
        |where brand_state is null or brand_state='' or brand_state='0' or brand_state='-1'
      """.stripMargin)

    frame.registerTempTable("tab1")
//    val taobaoCatePath = "s3a://o2o-dataproces-group/li_yinchao/Table/2020/6_1/taobao/"
    val taobaoCatePath = "s3a://o2o-dataproces-group/li_yinchao/Table/2020/7/tmall/"

    val brandTableOld = spark.read.json(taobaoCatePath)

    brandTableOld.registerTempTable("cate")

    val result = spark.sql(
      """
        |
        |select
        |distinct
        |a.brandName_cn,
        |a.firstCategoryId,
        |b.brand_state
        |from
        |tab1 a
        |left join
        |cate b
        |on a.brandName_cn=b.brandName_cn
        |
      """.stripMargin)

    result.repartition(1).write.csv("D:/zsc/品牌/各行业TOP30品牌10021-5")*/

    // Fix: stop the whole SparkSession (which also stops the SparkContext)
    // rather than only `sc.stop()`, so the session is fully released.
    spark.stop()
  }
}
