package com.o2o.cleaning.month.platform.ebusiness_plat.elm

import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.sparkContextFunctions

// Join the newly-extracted category data with the Elasticsearch documents.
//
// Pipeline:
//   1. Read the December-2020 "rsmsh" documents from Elasticsearch.
//   2. Left-join them with the extracted category labels stored on OBS (S3A),
//      keeping only rows that actually matched a label.
//   3. Join the result with the JD category dimension table on subCategoryId.
//   4. Write the enriched records back to OBS as JSON.
object join_newCate_To_ES {

  /**
   * Entry point for the batch job.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("Other")
      .master("local[*]")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .getOrCreate()

    // SECURITY: cloud credentials are hard-coded in source control.
    // Move them to environment variables / a secrets store and rotate these keys.
    val hadoopConf = spark.sparkContext.hadoopConfiguration
    hadoopConf.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    hadoopConf.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    hadoopConf.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    try {
      val index = "2020_rsmsh/rsmsh_2020_12"

      // Pull raw JSON documents from ES, then re-parse them through
      // spark.read.json so Spark infers a proper schema for SQL access.
      val values = spark.sparkContext.esJsonRDD(index).values
      spark.read.json(values).createOrReplaceTempView("esData")

      // Category labels (standId) previously extracted for these goods.
      spark.read.json(s"s3a://o2o-dataproces-group/zheng_liangliang/rsmsh/2020/12/rsmsh_label/")
        .createOrReplaceTempView("cateTable")

      // Attach the label columns to each ES record. The trailing
      // "where b.good_id is not null" deliberately turns the left join
      // into an inner join: unmatched records are dropped here.
      // The old root-category columns are replaced by the freshly-joined ones.
      val frame = spark.sql(
        """
          |select a.*,
          |b.rootCategoryName as root,
          | b.rootCategoryId as rootId,
          | b.categoryName as categoryName,
          | b.categoryId as categoryId,
          | b.subCategoryName as subCategoryName,
          | b.subCategoryId as subCategoryId
          | from esData as a
          |
          |left join cateTable as b on
          |a.good_id = b.good_id where b.good_id is not null
          |""".stripMargin)
        .drop("firstCategoryId", "secondCategoryId", "thirdCategoryId", "fourthCategoryId", "rootCategoryName", "rootCategoryId")
        .withColumnRenamed("root", "rootCategoryName")
        .withColumnRenamed("rootId", "rootCategoryId")

      frame.createOrReplaceTempView("t")

      // JD category dimension table, joined to backfill the four JD level ids.
      spark.read.json("s3a://o2o-dimension-table/category_table/jd/jd_sub_v26/")
        .createOrReplaceTempView("cate")

      spark.sql(
        """
          |select a.*,b.fourthCategoryId,
          | b.thirdCategoryId,b.secondCategoryId,b.firstCategoryId
          |
          | from t a left join cate b on a.subCategoryId = b.subCategoryId
          |""".stripMargin).write.json("s3a://o2o-dataproces-group/zheng_liangliang/rsmsh/2020/12/rsmsh_join_jdcate2/")

      // NOTE(review): the previous version carried large commented-out scratch
      // blocks here (re-reading the output, counting/inspecting rows, writing
      // unmatched rows to rsmsh_not_join_jdcate/, and saving back to ES with
      // es.mapping.id = good_id). Recover them from version control if needed.
    } finally {
      // Always release the Spark session, even if the pipeline fails.
      spark.stop()
    }
  }
}
