package com.o2o.cleaning.month.platform.ebusiness_plat.wangyiyanxuan

import com.alibaba.fastjson.JSON
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark.{sparkContextFunctions, sparkRDDFunctions}

object _01_Update_category {

  /**
   * One-off repair job: re-attaches the corrected first-level category name
   * (`swbfirstName`) to every document in the Wangyi Yanxuan 2021-06 ES index.
   *
   * Flow: read JSON docs from Elasticsearch -> drop the stale `swbfirstName`
   * column -> left-join a category dimension CSV from OBS (S3A) on
   * `swbfirstId` -> write the rows back to the same index, keyed by `good_id`
   * so existing documents are overwritten rather than duplicated.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("Other")
      .master("local[*]")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .getOrCreate()

    // SECURITY: cloud credentials are hard-coded in source. Move them to a
    // config file / environment variables and rotate the exposed key pair.
    val hadoopConf = spark.sparkContext.hadoopConfiguration
    hadoopConf.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    hadoopConf.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    hadoopConf.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    val index = "2021_wangyiyanxuan/wangyiyanxuan_2021_6"

    // Pull raw JSON documents from ES and drop the stale category-name column
    // so the join below can re-attach the corrected one.
    val values = spark.sparkContext.esJsonRDD(index).values
    spark.read.json(values).drop("swbfirstName").createOrReplaceTempView("esData")

    // Category dimension table (swbfirstId -> swbfirstName) maintained on OBS.
    spark.read.option("header", true)
      .csv("s3a://o2o-dimension-table/category_table/wangyiyanxuan_cate/wangyiyanxuan_cate_202112")
      .createOrReplaceTempView("cateTable")

    // Left join keeps every ES row even when no category mapping exists;
    // max() collapses duplicate swbfirstId rows in the dimension table so the
    // join cannot fan out.
    val frame = spark.sql(
      """
        |-- select count(1),sum(sellCount),sum(salesAmount)
        |select a.*,b.swbfirstName
        |from esData as a
        |left join (select swbfirstId,max(swbfirstName) swbfirstName from cateTable group by swbfirstId) b
        |on a.swbfirstId = b.swbfirstId
        |""".stripMargin)

    // Upsert back into the same index; es.mapping.id = good_id makes the
    // write an overwrite of the existing documents instead of an append.
    val updated = frame.toJSON.rdd.map(JSON.parseObject)
    updated.saveToEs(index, Map("es.mapping.id" -> "good_id"))

    spark.stop()
  }
}
