import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.elasticsearch.spark._

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/12/3 11:23
  * @ Param:  ${PARAM}
  * @ Description: Rewrites the category-id hierarchy of selected Tmall documents in Elasticsearch.
  */
object modify_cate_11 {

  /**
    * Spark driver that rewrites the category hierarchy for a fixed set of
    * Tmall category ids.
    *
    * For each month (1 to 12) of the `2019_tmall` index, documents whose
    * `categoryId` matches one of three hard-coded ids are read from
    * Elasticsearch, their four category-id fields are replaced with the
    * hard-coded target hierarchy (10024 / 1002402 / 100240201 / 10024020199),
    * and the documents are written back to the same index, keyed by `good_id`.
    *
    * @param args unused; all configuration is hard-coded below.
    */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    val conf = new SparkConf()
    conf.setAppName(this.getClass.getSimpleName)
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // SECURITY: S3/OBS credentials are hard-coded in source. Move them to a
    // credentials provider, environment variables, or a config file outside
    // version control. (They also appear unused by this job, which only talks
    // to Elasticsearch — confirm before removing.)
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    for (months <- 1 to 12) {
      // Fetch the matching documents for this month as raw JSON strings
      // (.values drops the ES document-id key from the (id, doc) pairs).
      val value1: RDD[String] = sc.esJsonRDD(s"2019_tmall/tmall_2019_${months}",
        """
          |{"size": 20,
          |  "query": {
          |    "bool": {
          |      "must": [
          |        {
          |          "bool": {
          |            "should": [
          |      {
          |        "match_phrase": {
          |          "categoryId": "121454038"
          |        }
          |      },{
          |        "match_phrase": {
          |          "categoryId": "124558013"
          |        }
          |      } ,{
          |        "match_phrase": {
          |          "categoryId": "124534019"
          |        }
          |      }
          |      ]
          |          }
          |        }
          |      ]
          |    }
          |  },"aggs": {
          |    "NAME": {
          |      "terms": {
          |        "field": "fourthCategoryId",
          |        "size": 10
          |      }
          |    }
          |  }
          |}
        """.stripMargin).values
      val frame1 = sqlContext.read.json(value1)

      frame1.createOrReplaceTempView("frame1")

      // Append the target category hierarchy as new columns, then drop the
      // old category columns and rename the new ones into their place
      // (target fourth-level id: 10024020199).
      val frame = sqlContext.sql(
        s"""
           |select
           |*,
           |'10024' as firstCategoryIds,
           |'1002402' as secondCategoryIds,
           |'100240201' as thirdCategoryIds,
           |'10024020199' as fourthCategoryIds
           |from frame1
       """.stripMargin)
        .drop("firstCategoryId","secondCategoryId","thirdCategoryId","fourthCategoryId")
        .withColumnRenamed("firstCategoryIds","firstCategoryId")
        .withColumnRenamed("secondCategoryIds","secondCategoryId")
        .withColumnRenamed("thirdCategoryIds","thirdCategoryId")
        .withColumnRenamed("fourthCategoryIds","fourthCategoryId")

      val result = frame.toJSON.rdd.map(line => {
        val lines = JSON.parseObject(line)
        val evaluates = lines.getOrDefault("evaluates", "-1").toString
        val baseInfo = lines.getOrDefault("Base_Info", "-1").toString
        // Re-materialise "evaluates" as a JSON object: documents without the
        // field get the sentinel {"fuyi":"-1"}; otherwise the existing value
        // is re-parsed (toJSON above serialised it to a string).
        val evaluatesObj =
          if (evaluates == "-1") JSON.parseObject("{\"fuyi\":\"-1\"}")
          else JSON.parseObject(evaluates)
        lines.put("evaluates", evaluatesObj)
        // NOTE(review): Base_Info is written back as its String form, so any
        // nested object becomes a plain string in ES — confirm this is the
        // intended mapping before re-running.
        lines.put("Base_Info", baseInfo)
        lines
      })

      // Upsert back into the same monthly index, keyed by good_id.
      result.saveToEs(s"2019_tmall/tmall_2019_${months}", Map("es.mapping.id" -> "good_id"))
    }

    sc.stop()
  }

}
