import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark._

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/11/21 15:44
  * @ Param:  ${PARAM}
  * @ Description: Recomputes incremental 2019 Tmall 11.11 pre-sale metrics as the
  *                delta between two Elasticsearch snapshots and writes the result
  *                back to a third index.
  */
object modify_yushou_dangqi {

  /**
    * Recomputes incremental pre-sale metrics for one category (thirdCategoryId
    * 100190103) between two Elasticsearch snapshots of the 2019 Tmall 11.11
    * pre-sale index, then upserts the delta into a third index.
    *
    * Pipeline:
    *   1. Read snapshot 3 and snapshot 1, each filtered to the category.
    *   2. Left-join on good_id; sells = max(snapshot3.sellCount - snapshot1.sellCount, 0),
    *      falling back to snapshot3.sellCount when the item is new in snapshot 3.
    *   3. Derive salesAmount / salesAmountAre as sells * price, keep rows with
    *      sellCount > 0, and pin timeStamp to 1572537600 (2019-11-01 00:00:00 CST).
    *   4. Sanitize a few string fields per document and save to
    *      tmall_2019_shuang11_2 keyed by good_id (idempotent re-runs).
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setAppName(this.getClass.getSimpleName)
    conf.set("spark.debug.maxToStringFields", "500")
    conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    sc.setLogLevel("WARN")
    // SECURITY: S3/OBS credentials are hard-coded in source control. Move them to
    // environment variables or a credentials provider, and rotate these keys.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    // Both snapshots are filtered to the same third-level category; the query
    // was previously duplicated verbatim for each read.
    val categoryQuery =
      """
        |{
        |  "query": {
        |    "term": {
        |      "thirdCategoryId": {
        |        "value": "100190103"
        |      }
        |    }
        |  }
        |}
      """.stripMargin

    // .values drops the ES document id, keeping only the JSON source.
    val data3 = sc.esJsonRDD("2019_presale_tmall/tmall_2019_shuang11_3", categoryQuery).values
    val data1 = sc.esJsonRDD("2019_presale_tmall/tmall_2019_shuang11_1", categoryQuery).values

    val value3 = sqlContext.read.json(data3)
    val value1 = sqlContext.read.json(data1)

    value3.createOrReplaceTempView("value3")
    value1.createOrReplaceTempView("value1")

    import org.apache.spark.sql.functions._
    // sells: per-item increment between the snapshots, clamped at 0 so a count
    // that went down (re-crawl noise) does not produce negative sales.
    val result_data = sqlContext.sql(
      """
         |select
         |*,
         |cast(sells as bigint)*cast(priceText as double) as sales,
         |cast(sells as bigint)*cast(priceTextAre as double) as salesAre
         |from
         |(select
         |t1.*,
         |case
         |when t2.good_id is null then t1.sellCount
         |else (
         |case
         |when cast(t1.sellCount as bigint)-cast(t2.sellCount as bigint)<=0 then 0
         |else cast(t1.sellCount as bigint)-cast(t2.sellCount as bigint)
         |end
         |)
         |end as sells
         |from value3 t1
         |left join
         |value1 t2
         |on t1.good_id=t2.good_id)
       """.stripMargin).drop("sellCount","salesAmount","salesAmountAre")
      .withColumnRenamed("sells","sellCount")
      .withColumnRenamed("sales","salesAmount")
      .withColumnRenamed("salesAre","salesAmountAre").where("sellCount>0")
      // Pin every row to the pre-sale cutoff timestamp (2019-11-01 00:00:00 CST).
      .drop("timeStamp").withColumn("timeStamp",lit("1572537600"))

    // Per-document cleanup before writing back to ES.
    val value = result_data.toJSON.rdd.map(line => {
      val lines = JSON.parseObject(line)
      val evaluates = lines.getOrDefault("evaluates", "-1").toString
      val baseInfo = lines.getOrDefault("Base_Info", "-1").toString
      // getOrDefault (instead of get) avoids an NPE when priceText is absent.
      val priceText = lines.getOrDefault("priceText", "-1").toString
      // Scraped prices sometimes come back as a "?" placeholder (half- or
      // full-width); fall back to priceText in that case.
      var promotion_price = lines.getOrDefault("promotion_price", "-1").toString
      if (promotion_price.contains("?") || promotion_price.contains("？")) {
        promotion_price = priceText
      }
      var original_cost = lines.getOrDefault("original_cost", "-1").toString
      if (original_cost.contains("?") || original_cost.contains("？")) {
        original_cost = priceText
      }
      // Re-nest evaluates as a JSON object; a missing field becomes the
      // sentinel {"fuyi":"-1"} (same behavior as the old ev/str two-step).
      val evaluatesObj =
        if (evaluates.equals("-1")) JSON.parseObject("{\"fuyi\":\"-1\"}")
        else JSON.parseObject(evaluates)
      lines.put("evaluates", evaluatesObj)
      lines.put("Base_Info", baseInfo)
      lines.put("promotion_price", promotion_price)
      lines.put("original_cost", original_cost)
      lines
    })
    // Upsert keyed by good_id so re-running the job overwrites rather than duplicates.
    value.saveToEs("2019_presale_tmall/tmall_2019_shuang11_2", Map("es.mapping.id" -> "good_id"))
  }

}
