import com.o2o.utils.Iargs
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.elasticsearch.spark._
/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/6/5 16:23
  * @ Param:  ${PARAM}
  * @ Description: 
  */
/**
  * One-off maintenance job: pulls documents from an Elasticsearch index whose
  * subCategoryName phrase-matches "彩妆套装", left-joins them against a category
  * dimension table stored on S3/OBS (keyed by subCategoryId), and previews the
  * re-categorised rows.
  *
  * NOTE(review): the write-back to ES (saveToEs) is commented out below, so as
  * written this job is read/preview only; `bakPath` is likewise only used by
  * the commented-out backup step.
  */
object UpdateEsCateGory {

  def main(args: Array[String]): Unit = {

    // SECURITY(review): ES node address and credentials are hard-coded here;
    // move them into configuration / secret storage before production use.
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      // NOTE(review): local master is hard-coded — drop for cluster submission.
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A credentials/endpoint so Spark can read the OBS-hosted dimension table.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Target ES index/type, backup location, and the category dimension table path.
    val index = "2020_jumei/jumei_2020_5"
    val bakPath = s"s3a://o2o-dataproces-group/zsc/cateBakPath/190205/${index}"
    val cateGoryPath = "s3a://o2o-dimension-table/category_table/categoryFile_tao/jumei_2020_6/"

    // Fetch matching documents as raw JSON strings (.values keeps the document
    // body, discarding the ES _id key of each (id, json) pair).
    // NOTE(review): "size": 10 caps each scroll batch, not the total hit count.
    val data1 = sc.esJsonRDD(s"${index}",
      """
        |{
        |  "size": 10,
        |  "query": {
        |    "bool": {
        |      "should": [
        |        {"match_phrase": {
        |          "subCategoryName": "彩妆套装"
        |        }}
        |      ]
        |    }
        |  }
        |}
      """.stripMargin).values
    //data1.saveAsTextFile(s"${bakPath}")

    // Category dimension table (JSON), joinable on subCategoryId.
    val cateDF: DataFrame = spark.read.json(cateGoryPath)
    // FIX: registerTempTable is deprecated since Spark 2.0 — use the
    // behavior-identical createOrReplaceTempView instead.
    cateDF.createOrReplaceTempView("catetab")

    val dataDF: DataFrame = spark.read.json(data1)
    dataDF.createOrReplaceTempView("datatab")

    // Drop the stale (all-lowercase) category-id columns from the ES documents,
    // then left-join the fresh ids in from the dimension table.
    val totalDF: DataFrame = dataDF
      .drop("firstcategoryid")
      .drop("secondcategoryid")
      .drop("thirdcategoryid")
      .drop("fourthcategoryid")
      .join(cateDF, Seq("subCategoryId"), "left")

    totalDF.createOrReplaceTempView("total")
    // Preview the joined result (show() prints the first 20 rows).
    // FIX: spark.sql replaces the legacy spark.sqlContext.sql accessor.
    spark.sql(
      """
        |select
        |*
        |from
        |total
        |
      """.stripMargin).show()


    /*data1.map(line=>{
      val nObject: JSONObject = JSON.parseObject(line)
      nObject.put("firstCategoryId","10019")
      nObject.put("secondCategoryId","1001902")
      nObject.put("thirdCategoryId","100190205")
      nObject.put("fourthCategoryId","10019020599")
      nObject
    }).saveToEs(s"${index}",
        Map("es.mapping.id" -> "good_id", "es.nodes" -> s"192.168.1.29",
          "es.port" -> "9200",
          "cluster.name" -> "Es-OTO-Data"))*/

    // FIX: stop the whole SparkSession (which also stops the SparkContext)
    // rather than only the underlying context.
    spark.stop()
  }
}
