package com.o2o.cleaning.month.platform.ebusiness_plat.rongegou

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._

/**
 * Ad-hoc data-inspection utility for the "rongyigou" e-commerce platform.
 *
 * Reads ORC snapshots from OBS (via the s3a connector) and runs exploratory
 * SQL against them; also provides helpers to dump an Elasticsearch index to
 * ORC (`fun`) and to convert JSON source dumps to ORC (`jsonToorc`).
 *
 * NOTE(security): ES and OBS credentials are hardcoded below. They should be
 * moved to configuration / environment variables before this leaves a
 * developer machine — flagged for review, not silently changed.
 */
object CheckDataDetail {

  def main(args: Array[String]): Unit = {

    // Local debug session; ES connection settings are picked up by
    // elasticsearch-spark via the "es.*" keys.
    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    // s3a access to Huawei Cloud OBS (endpoint + static keys).
    val sc = spark.sparkContext
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Context values for the commented-out experiments below; kept so those
    // snippets can be re-enabled without edits. `collection` was a `var` but
    // is never reassigned — made `val`.
    val year = 2021
    val month = 6
    val platform = "rongyigou"
    val collection = "rongyigou_2106"

    // registerTempTable is deprecated since Spark 2.0; use
    // createOrReplaceTempView instead.
    spark.read.orc("s3a://o2o-dataproces-group/zyf/icbc/essource/2022/01/").createOrReplaceTempView("t1")
    spark.sql(
      """
        |select * from t1 where good_id = '9002758703'
        |""".stripMargin).show(false)
    //    spark.sql(
    //      """
    //        |select date_format(CURRENT_DATE,'yyyyMMdd')
    //        |""".stripMargin).show(false)
    //    spark.read.json("s3a://o2o-dimension-table/category_table/cate/cate0401/rongyigou/subCategoryId/*")
    //      .repartition(1).write.option("header", "true").csv("D:\\test")

    //    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/icbc/essource/2020/08/").registerTempTable("t1")
    //    spark.sql(
    //      """
    //        |select subCategoryId,subCategoryName,count(1) from t1 where thirdCategoryId = '100210302' group by subCategoryId,subCategoryName
    //        |""".stripMargin).show(false)
    //spark.read.option("header", true).csv(s"s3a://o2o-dataproces-group/zyf/zishengtang/").write.orc(s"s3a://o2o-dataproces-group/zyf/zishengtang_new/")
    //    fun(spark, sc, 2021, 9)


    //  var sourcePath = s"s3a://o2o-sourcedata-2021/obs-source-2021/2021/${month}/${platform}/${collection}"
    //  var resultPath = s"s3a://o2o-dataproces-group/zyf/2021/${month}/${platform}/good_final/"
    //    val frame = spark.read.json(s"s3a://o2o-sourcedata-2021/obs-source-2021/7/rongyigou/rongyigou_2021_7/")
    //      .repartition(1).write.orc(s"s3a://o2o-dataproces-group/zyf/rongyigou/mongosource/2021/07/")
    //    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/rongyigou/mongosource/2020/01/")
    //    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/rongyigou/mongosource/2020/12/")
    //      .registerTempTable("t1")
    //      .where("good_id != '128320438' and good_id != '116344482'")
    //      .repartition(1).write.orc(s"s3a://o2o-dataproces-group/zyf/rongyigou/test/")

    //    spark.sql("select * from t1 where good_id = '116344482'" )
    //      .show(false)
    //      .coalesce(1).write.option("header", "true").json("D://test_all")


    //        val months = Array("01", "02", "03", "04", "05")
    //        val months = Array("07", "08", "09")
    //    val months = Array("10", "11", "12")
    //    for (month <- months) {
    //      orcToJson(spark, sc, 2018, month.toInt)
    //    }
    //    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/icbc/essource/2021/08/").registerTempTable("t1")
    //
    //    spark.sql(
    //      """
    //        |select count(1) count, cast(sum(sellCount) as bigInt) sell,cast(sum(salesAmount) as decimal(20,2)) sale from t1
    //          """.stripMargin
    //    ).show(false)
    //    val ss = sparksess.sql(
    //      """
    //        |select * from t1 where sellCount >0  order by sellCount desc
    //        |""".stripMargin).coalesce(1).write.option("header", "true").csv("D://test_all")
    //        fun(spark, sc, 2021, 10)
  }

  /**
   * Dumps the Elasticsearch index `${year}_rongyigou/rongyigou_${year}_${month}`
   * to a single ORC file under the icbc/essource OBS path.
   *
   * Was written with deprecated procedure syntax (`def fun(...) { }`), which
   * is removed in Scala 3 — now declares `: Unit =` explicitly.
   *
   * @param spark active session used to parse the ES JSON documents
   * @param sc    context providing `esJsonRDD` (via org.elasticsearch.spark._)
   * @param year  index year component
   * @param month index month component — NOTE(review): interpolated without
   *              zero-padding; months 1-9 produce paths like ".../2021/6/",
   *              which must match how the data was originally written
   */
  def fun(spark: SparkSession, sc: SparkContext, year: Int, month: Int): Unit = {
    val index = s"${year}_rongyigou/rongyigou_${year}_${month}"
    println("----------" + index + "----------")
    // esJsonRDD yields (docId, jsonString); only the JSON bodies are needed.
    val data = sc.esJsonRDD(index).values
    spark.read.json(data).repartition(1).write.orc(s"s3a://o2o-dataproces-group/zyf/icbc/essource/${year}/${month}/")
  }

  /**
   * Converts a month's JSON mongo-source dump to a single ORC file.
   * The previous `val frame = ...` bound a `Unit` write result and was
   * never used — the binding has been dropped.
   *
   * @param spark session used for the read/write
   * @param sc    unused; kept for signature compatibility with callers
   * @param year  source path year component
   * @param month source path month component (same zero-padding caveat as `fun`)
   */
  def jsonToorc(spark: SparkSession, sc: SparkContext, year: Int, month: Int): Unit = {
    println(s"${year}---${month}")
    spark.read.json(s"s3a://o2o-dataproces-group/zyf/rongyigou/mongosource/${year}/${month}/")
      .repartition(1).write.orc(s"s3a://o2o-dataproces-group/zyf/rongyigou/mongoORCsource/${year}/${month}/")
  }
}
