import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  * @description Reads Tmall pre-sale product records from OBS (ORC), normalises
  *              the `evaluates` field, and bulk-loads the result into Elasticsearch.
  */
object BrandDataJoin {

  /**
    * Entry point: reads pre-sale product records (ORC) from OBS, normalises the
    * `evaluates` field of each record, tags it with an activity index and a fixed
    * timestamp, and bulk-writes the documents to Elasticsearch, using the
    * generated `index` field as the ES document id.
    */
  def main(args: Array[String]): Unit = {

    // NOTE(review): ES host and credentials are hard-coded in source control —
    // these should come from configuration / a secrets store.
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name","O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    // S3A credentials for the OBS object store come from the project's Iargs holder.
    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    import org.elasticsearch.spark._
    import com.alibaba.fastjson.{JSON, JSONObject}

    // Read the source once and reuse it for both the schema dump and the ES load
    // (the original read the same path twice).
    val sourcePath = "s3a://dws-data/g_data/2020/shuang11/2020_presale_2_tmall/tmall_orc/"
    val source = spark.read.orc(sourcePath)
    source.printSchema()

    source.toJSON.rdd.map { line =>
      val record: JSONObject = JSON.parseObject(line)

      // ES document id = good_id + activity stage (see es.mapping.id below).
      val goodId: String = record.getString("good_id")
      val stage: String = "20201021_20201031"
      val index = goodId + stage

      // Normalise `evaluates`: the source stores either the sentinel "-1"
      // (no evaluation data) or a JSON object serialised as a string. Replace
      // the string form with a real JSON object so ES receives a nested doc;
      // the sentinel becomes {"fuyi":"-1"}.
      val evaluates = record.get("evaluates").toString
      val evaluatesJson: JSONObject =
        if (evaluates.equals("-1")) JSON.parseObject("{\"fuyi\":\"-1\"}")
        else JSON.parseObject(evaluates)
      record.put("evaluates", evaluatesJson)

      // Fixed activity timestamp (2020-11-03 00:00:00 epoch seconds as a string).
      record.put("timeStamp", "1604332800")
      record.put("index", index)

      record
    }.saveToEs("tmall_2020_activity/tmall_2020_11_1_2020_tui", Map("es.mapping.id" -> "index"))

    sc.stop()
  }
}
