package com.o2o.cleaning.month.platform.ebusiness_plat.zaixianjy_yl.jiaoyu

import com.alibaba.fastjson.JSON
import org.apache.spark.sql.SparkSession

object CheckDataDetail {

  // Earlier ad-hoc parameters, kept for reference:
  //  val year = 2021
  //  val month = 6
  //  val platform = "rongyigou"
  //  var collection = "rongyigou_2106"
  //  var sourcePath = s"s3a://o2o-sourcedata-2021/obs-source-2021/2021/${month}/${platform}/${collection}"
  //  var resultPath = s"s3a://o2o-dataproces-group/zyf/2021/${month}/${platform}/good_final/"

  /**
   * Ad-hoc data-inspection job: reads one month of split ORC data from OBS
   * (through the Hadoop s3a connector), registers it as a temp view, and
   * prints the rows to stdout for manual verification.
   *
   * NOTE(review): the ES credentials, OBS access/secret keys and input path
   * below are hard-coded in source. They should be moved to configuration or
   * environment variables, and the committed secrets rotated.
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Case-sensitive column resolution: source JSON/ORC has fields differing only by case.
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      // NOTE(review): plaintext ES credentials committed to source — rotate and externalize.
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      // Local mode: this is a developer inspection tool, not a cluster job.
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // NOTE(review): plaintext OBS (Huawei Cloud) credentials committed to source —
    // rotate the keys and load them from the environment / credentials provider.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Register the target dataset and dump it untruncated for eyeballing.
    // createOrReplaceTempView replaces the deprecated registerTempTable (same behavior).
    spark.read.orc("s3a://dws-data/split/split_data/2022/tengxunketang/1/").createOrReplaceTempView("t1")
    spark.sql(
      """
        |select * from t1
        |""".stripMargin).show(false)

    // --- Kept for reference: month-over-month comparison experiments ---------

    //    val lastdata = spark.read.json("s3a://o2o-sourcedata-2021/obs-source-2021/6/tengxunketang/").toJSON.rdd.map(line => {
    //      val nObject = JSON.parseObject(line)
    //      nObject.remove("_id")
    //      nObject.toString
    //    })
    //    spark.read.json(lastdata).createOrReplaceTempView("lastmonth")
    //    //      val thisdata = spark.read.orc("s3a://o2o-dataproces-group/zyf/2021/8/tengxunketang/").toJSON.rdd.map(line =>{
    //    val thisdata = spark.read.json("s3a://o2o-sourcedata-2021/obs-source-2021/7/tengxunketang/").toJSON.rdd.map(line => {
    //      val nObject = JSON.parseObject(line)
    //      nObject.remove("_id")
    //      nObject.toString
    //    })
    //    spark.read.json(thisdata).createOrReplaceTempView("thismonth")

    //     spark.sql(
    //      """
    //        |select a.servicesId,a.servicesStuCount,b.servicesStuCount from
    //        |(select
    //        |servicesId
    //        |,max(cast(servicesStuCount as bigint)) servicesStuCount
    //        |from
    //        |thismonth
    //        |group by servicesId
    //        |)a left join(
    //        |select
    //        |servicesId
    //        |,max(cast(servicesStuCount as bigint)) servicesStuCount
    //        |from
    //        |lastmonth
    //        |group by servicesId
    //        |)b on a.servicesId = b.servicesId
    //        |where b.servicesId is not null
    //        |""".stripMargin).show(false)

//    // Compute the delta and deduplicate by servicesId
//    var result = spark.sql(
//      """
//        |select sum(aaaa) from (
//        |select case when b.servicesId is null then a.servicesStuCount else a.servicesStuCount-b.servicesStuCount end aaaa from
//        |(select
//        |servicesId
//        |,max(cast(servicesStuCount as bigint)) servicesStuCount
//        |from
//        |thismonth
//        |where servicesStuCount is not null
//        |group by servicesId
//        |)a left join(
//        |select
//        |servicesId
//        |,max(cast(servicesStuCount as bigint)) servicesStuCount
//        |from
//        |lastmonth
//        |where servicesStuCount is not null
//        |group by servicesId
//        |)b on a.servicesId = b.servicesId
//        |)
//        |where aaaa > 0
//        |""".stripMargin).show(false)
//

    //        spark.read.orc(s"s3a://o2o-dataproces-group/zyf/2021/7/tengxunketang/")
    //          .createOrReplaceTempView("t1")
    //
    //        spark.sql(
    //          """
    //            |select * from (select cast(salesAmount as double) as salesAmountd,* from t1) order by salesAmountd desc limit 100
    //          """.stripMargin
    //        ).repartition(1).write.option("header", "true").csv("D://test")
    //        val ss = spark.sql(
    //          """
    //            |select good_id,priceText,sellCount,salesAmount from t1 where good_id = '604830095942216d0f13c34b5e7c992a2887853a598b4567'
    //            |""".stripMargin).show(false)
    //          .repartition(1).write.orc("s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/sourceData/2021/7/kuaishou_webcast_shop_list_21071/")

    // Release the session (and its underlying SparkContext) before exiting.
    spark.stop()
  }


}
