package com.o2o.cleaning.month.platform.ebusiness_plat.ddmc

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._

/**
 * Ad-hoc inspection job for cleaned Dingdong-Maicai (ddmc) monthly data.
 *
 * Reads ORC output from the staging bucket and prints it for manual
 * verification. The commented-out snippets below are previous one-off
 * queries kept as a scratch history.
 *
 * Usage: CheckDataDetail [year] [month] [platform]
 * All arguments are optional; defaults reproduce the original hard-coded
 * run (2021, 8, "dingdongmc").
 */
object CheckDataDetail {

  def main(args: Array[String]): Unit = {

    // Allow overriding the target period/platform from the command line;
    // when no args are given, behavior is identical to the original script.
    val year     = if (args.length > 0) args(0).toInt else 2021
    val month    = if (args.length > 1) args(1).toInt else 8
    val platform = if (args.length > 2) args(2) else "dingdongmc"

    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      // SECURITY: Elasticsearch endpoint and credentials are hard-coded.
      // Move them to a config file / environment variables before sharing
      // or committing this code.
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // SECURITY: S3A access/secret keys are hard-coded. Prefer a Hadoop
    // credential provider or environment-based configuration.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // NOTE(review): these paths are currently unused by the active query
    // below; kept because the commented scratch queries reference similar
    // locations and a future run may need them.
    val sourcePath = s"s3a://o2o-sourcedata-2021/obs-source-${year}/${month}/${platform}/${platform}_${year}_${month}/"
    val resultPath = s"s3a://o2o-dataproces-group/zyf/${year}/${month}/${platform}/good/"

    // --- scratch history: previous one-off checks, kept for reference ---

    //    spark.read.orc(s"s3a://o2o-tempdata/zyf/2021/7/dingdongmc_bak/good/*")
    //            .show(false)
    //      .registerTempTable("t1")
    //    spark.sql(
    //      """
    //        |select count(1),sum(sellCount),sum(salesAmount) from t1
    //        |""".stripMargin).show(false)

    //    spark.read.orc(s"s3a://o2o-tempdata/zyf/2021/7/dingdongmc_bak/good/*")
    //      .repartition(1).write.mode("overwrite").orc(s"s3a://o2o-tempdata/zyf/ddmc/fact/2021/07")

    // Active check: dump the July-2021 fact table for eyeballing.
    // NOTE(review): path is hard-coded to 2021/07 and does NOT follow the
    // year/month values above (which default to 2021/8) — confirm which
    // period is actually intended before relying on the args.
    spark.read.orc(s"s3a://o2o-tempdata/zyf/ddmc/fact/2021/07").show(false)

    // //    spark.read.orc(s"s3a://o2o-tempdata/zyf/ddmc/fact/2020/07").printSchema()
    //    spark.read.option("header", true).csv("D:\\test").registerTempTable("t1")
    //    spark.sql(
    //      """
    //        |select *,case when cate5 is not null then cate5 when cate4 is not null then cate4 when cate3 is not null then cate3 when cate2 is not null then cate2 when cate1 is not null then cate1 else '999' end asd
    //        |,case when catee5 is not null then catee5 when catee4 is not null then catee4 when catee3 is not null then catee3 when catee2 is not null then catee2 when catee1 is not null then catee1 else '999' end asdd from t1
    //        |""".stripMargin).write.option("header", true).csv("D:\\test1")

    //    spark.sql(
    //      """
    //        |select count(1),sum(sellCount),sum(salesAmount)  from t1 where sellCount > 0
    //        |""".stripMargin).show(false)
    //        spark.sql(
    //          """
    //            |select count(1) count, cast(sum(sellCount) as bigInt) sell,cast(sum(salesAmount) as decimal(20,2)) sale from t1
    //          """.stripMargin
    //        ).show()
    //        val ss = spark.sql(
    //          """
    //            |select good_id,priceText,sellCount,salesAmount from t1 where good_id = '604830095942216d0f13c34b5e7c992a2887853a598b4567'
    //            |""".stripMargin).show(false)
    //          .repartition(1).write.orc("s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/sourceData/2021/7/kuaishou_webcast_shop_list_21071/")

    // Release the SparkSession and its underlying SparkContext — the
    // original script leaked these by never stopping the session.
    spark.stop()
  }

}
