package a_o2odata_deal

import com.alibaba.fastjson.JSON
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2020/4/17 13:21
  * @ Description: Re-tags Tmall live-streaming goods that fall into three
  *                agricultural-supply categories by rewriting their category
  *                ids in the Elasticsearch live-goods indices.
  */
/**
  * Spark batch job: for each monthly Tmall index, find goods belonging to three
  * target agricultural-supply ("nongzi") categories, left-join them onto the
  * month's live-stream goods index by `good_id`, overwrite the category ids of
  * matched rows with fixed "nongzi" codes, and upsert the result back into the
  * same live-goods index (document id = `liveAndgood_id`).
  */
object nongzi_test {

  /** ES query selecting goods whose `categoryId` is one of the three target
    * agricultural-supply categories.
    * NOTE(review): es-spark reads with scan/scroll, so the trailing `aggs`
    * section is presumably ignored by `esJsonRDD` — confirm and consider
    * removing it. Query text kept byte-identical to the original.
    */
  private val categoryQuery: String =
    """
      |{"size": 20,
      |  "query": {
      |    "bool": {
      |      "must": [
      |        {
      |          "bool": {
      |            "should": [
      |      {
      |        "match_phrase": {
      |          "categoryId": "121454038"
      |        }
      |      },{
      |        "match_phrase": {
      |          "categoryId": "124558013"
      |        }
      |      } ,{
      |        "match_phrase": {
      |          "categoryId": "124534019"
      |        }
      |      }
      |      ]
      |          }
      |        }
      |      ]
      |    }
      |  },"aggs": {
      |    "NAME": {
      |      "terms": {
      |        "field": "fourthCategoryId",
      |        "size": 10
      |      }
      |    }
      |  }
      |}
    """.stripMargin

  /** Left-join of live-goods rows onto the matched-goods set: rows whose
    * `good_id` matched get the fixed "nongzi" category codes; unmatched rows
    * keep their original second/third/fourth category ids.
    */
  private val retagSql: String =
    """
      |select
      |t1.*,
      |case when t2.good_id is null then t1.secondCategoryId else '1002402' end as secondCategoryIds,
      |case when t2.good_id is null then t1.thirdCategoryId else '100240201' end as thirdCategoryIds,
      |case when t2.good_id is null then t1.fourthCategoryId else '10024020199' end as fourthCategoryIds
      |from zhibo_data t1
      |left join
      |source_data t2
      |on t1.good_id=t2.good_id
    """.stripMargin

  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)

    val conf = new SparkConf()
    // Fix: `s"${this.getClass.getSimpleName}"` interpolated nothing — the plain
    // value is identical.
    conf.setAppName(this.getClass.getSimpleName)
    conf.set("spark.debug.maxToStringFields", "500")
    //conf.setMaster("local[*]")
    conf.set("es.nodes", "192.168.1.157")
    conf.set("es.port", "9200")
    conf.set("cluster.name", "O2OElastic")
    conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    conf.set("spark.sql.caseSensitive", "true")

    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // SECURITY(review): object-storage credentials are hard-coded in source.
    // Move them to spark-submit configuration or environment variables and
    // rotate these keys before this file leaves a private repository.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")

    import org.elasticsearch.spark._
    for (month <- 1 to 2) {
      // Goods in the target categories for this month; only `good_id` is
      // needed for the join.
      val matchedGoodsJson = sc.esJsonRDD(s"2020_tmall/tmall_2020_${month}", categoryQuery).values
      sqlContext.read.json(matchedGoodsJson)
        .selectExpr("good_id")
        .createOrReplaceTempView("source_data")

      // Full live-stream goods data for the month.
      sqlContext.read
        .json(sc.esJsonRDD(s"2020_tmall_good_live/tmall_good_live_2020_${month}").values)
        .createOrReplaceTempView("zhibo_data")

      // Swap the freshly computed *Ids columns in for the originals so the
      // output schema matches the source index.
      val retagged = sqlContext.sql(retagSql)
        .drop("secondCategoryId", "thirdCategoryId", "fourthCategoryId")
        .withColumnRenamed("secondCategoryIds", "secondCategoryId")
        .withColumnRenamed("thirdCategoryIds", "thirdCategoryId")
        .withColumnRenamed("fourthCategoryIds", "fourthCategoryId")

      // Upsert back into the same live-goods index, keyed by `liveAndgood_id`
      // so re-runs overwrite rather than duplicate documents.
      retagged.toJSON.rdd
        .map(line => JSON.parseObject(line))
        .saveToEs(s"2020_tmall_good_live/tmall_good_live_2020_${month}",
          Map("es.mapping.id" -> "liveAndgood_id"))
    }
  }
}
