package com.o2o.cleaning.month.platform.ebusiness_plat.kuaishou

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.lit
import org.elasticsearch.spark._

object CheckOut_ES {
  /**
    * Ad-hoc verification job: reads a monthly Kuaishou live-streaming index from
    * Elasticsearch into Spark and runs spot-check queries against it.
    *
    * Usage: optionally pass `year month` as program arguments; defaults are 2021 10,
    * which reproduces the previous hard-coded behavior exactly.
    *
    * SECURITY NOTE(review): ES and S3 credentials are hard-coded below. They should be
    * moved to a secrets store / spark-submit conf before this leaves ad-hoc use.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("KuaiShou_LiveStreaming")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A (Huawei OBS) access for the ORC result paths referenced below.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    // Year/month can be overridden from the command line; defaults keep the
    // original 2021-10 behavior. All paths below derive from these values so
    // they can no longer drift out of sync with the hard-coded literals.
    val year = if (args.length > 0) args(0).toInt else 2021
    val month = if (args.length > 1) args(1).toInt else 10
    val platform = "kuaishou"
    // Collection naming convention: <platform>_webcast_shop_list_<yy><mm>, e.g. "..._2110".
    val collection = f"${platform}_webcast_shop_list_${year % 100}$month%02d"
    // ES index/type: "<year>_<platform>/<platform>_<year>_<month>", e.g. "2021_kuaishou/kuaishou_2021_10".
    val index = s"${year}_${platform}/${platform}_${year}_${month}"
    //    val resultPath = s"s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/resultData/${year}/${month}/${collection}/"
    val resultPath = s"s3a://o2o-dataproces-group/zyf/livestreaming/${platform}/dws/${year}/${month}/"
//    spark.read.orc(resultPath).createOrReplaceTempView("t_price")
//    spark.sql(
//      """
//        |select is_flagship,count(1) from t1 group by is_flagship
//        |""".stripMargin).show(false)
    println(index)
    // Empty query string = match-all; pull each document as raw JSON and let
    // Spark infer the schema.
    val data = sc.esJsonRDD(index, "").values
    // registerTempTable is deprecated since Spark 2.0 — createOrReplaceTempView
    // is the supported equivalent.
    spark.read.json(data).createOrReplaceTempView("t1")
    spark.sql(
      """
        |select * from t1 where brandName_cn = 'DJBS'
        |""".stripMargin).show(20)
//    spark.read.json(data).write.orc(s"s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/es/2021/10/")
//      .drop("price").registerTempTable("tall")
//        val result = spark.sql(
//          """
//            |select t1.*,t2.price as priceText from tall t1
//            |left join
//            |(select good_id,price from t_price) t2
//            |on t1.good_id = t2.good_id
//            |""".stripMargin)
//          .show(false)
//          .drop("timeStamp").withColumn("timeStamp", lit("1632931200"))
    //      .write.orc(s"s3a://dws-data/g_data/kuaishou/oss/2021/9/")
    //    spark.read.json(data)
    //      .drop("firstCategoryId")
    //      .drop("secondCategoryId")
    //      .drop("thirdCategoryId")
    //      .drop("fourthCategoryId")
    //      .registerTempTable("t_all")
    //    spark.read.orc(resultPath).registerTempTable("t_category")
    //    val result = spark.sql(
    //      """
    //        |select t1.*,
    //        |t2.firstCategoryId as firstCategoryId,
    //        |t2.secondCategoryId as secondCategoryId,
    //        |t2.thirdCategoryId as thirdCategoryId,
    //        |t2.fourthCategoryId as fourthCategoryId
    //        |from t_all t1
    //        |left join
    //        |t_category t2
    //        |on t1.categoryId = t2.categoryId and t1.good_id = t2.good_id
    //        |""".stripMargin)
    //
//            result.toJSON.rdd.map(line => {
//              val nObject: JSONObject = JSON.parseObject(line)
//              nObject
//            }).saveToEs(s"${index}",
//              Map("es.mapping.id" -> "good_id", "es.nodes" -> s"192.168.1.29",
//                "es.port" -> "9200",
//                "cluster.name" -> "Es-OTO-Data"))

    //      .show(false)
    //    spark.sql(
    //      """
    //        |select shopName,shopId from
    //        |(select shopName,shopId,rank() over(partition by shopId order by sum(salesAmount) desc) rank
    //        |from t1
    //        |where subplatformName = 'xiaodian' group by shopName,shopId)
    //        |where rank = 1
    //        |""".stripMargin)
    //      //      .show(false)
    //      .registerTempTable("t_shop")
    //    spark.read.json(data).drop("shopName").registerTempTable("t_all")
    //    val result = spark.sql(
    //      """
    //        |select t1.*,t2.shopName from t_all t1
    //        |left join t_shop t2
    //        |on t1.shopId = t2.shopId
    //        |""".stripMargin)
    //    //      .show(false)
    //    result.toJSON.rdd.map(line => {
    //      val nObject: JSONObject = JSON.parseObject(line)
    //      nObject
    //    }).saveToEs(s"${index}",
    //      Map("es.mapping.id" -> "good_id", "es.nodes" -> s"192.168.1.29",
    //        "es.port" -> "9200",
    //        "cluster.name" -> "Es-OTO-Data"))


    /**
      * The commented-out block below backfills Kuaishou documents whose category
      * IDs are missing, writing the sentinel values 10099 / 1009999 / 100999999 /
      * 10099999999 back to Elasticsearch.
      */
    //    val data = sc.esJsonRDD(index,
    //      """
    //        |{"size": 10,
    //        |"query": {
    //        |  "bool": {
    //        |    "must_not": [
    //        |      {
    //        |        "bool": {
    //        |          "filter": {
    //        |            "exists": {
    //        |              "field": "firstCategoryId"
    //        |            }
    //        |          }
    //        |        }
    //        |      }
    //        |    ]
    //        |  }
    //        |}
    //        |}
    //      """.stripMargin).values
    //    println(s"-----修改 ：${index} 的数据 -----")
    //    if (data.isEmpty() == false) {
    //      val data1 = spark.read.json(data).drop("firstCategoryId").drop("secondCategoryId").drop("thirdCategoryId").drop("fourthCategoryId")
    //      data1.toJSON.rdd.map(line => {
    //        val nObject: JSONObject = JSON.parseObject(line)
    //        nObject.put("firstCategoryId", "10099")
    //        nObject.put("secondCategoryId", "1009999")
    //        nObject.put("thirdCategoryId", "100999999")
    //        nObject.put("fourthCategoryId", "10099999999")
    //        nObject
    //      }).saveToEs(s"${index}",
    //        Map("es.mapping.id" -> "good_id", "es.nodes" -> s"192.168.1.29",
    //          "es.port" -> "9200",
    //          "cluster.name" -> "Es-OTO-Data"))
    //    }

    // Release Spark resources — the original leaked the session on exit.
    spark.stop()
  }
}
