package com.o2o.cleaning.month.platform.ebusiness_plat.kuaishou

import org.apache.spark.sql.SparkSession

/**
 * Ad-hoc data-quality check for the Kuaishou live-streaming monthly detail data.
 *
 * Reads one month's ORC snapshot from Huawei OBS (via the s3a connector) and
 * prints row count plus sums of `sellCount` / `salesAmount` so the totals can be
 * eyeballed against the upstream source. The commented-out variants below query
 * the other pipeline stages (dws, dws_to_dws, exported CSV) and are kept for
 * quick re-use during investigations.
 */
object CheckData_MonthDetail {
  def main(args: Array[String]): Unit = {
    // SECURITY(review): access keys, secret keys and the ES password are
    // hard-coded in source. These should be moved to a config file, environment
    // variables, or a secrets manager, and the committed values rotated.
    val spark = SparkSession.builder()
      .appName("KuaiShou_LiveStreaming")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      // Case-sensitive SQL so camelCase ORC columns (sellCount/salesAmount) resolve exactly.
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // s3a credentials/endpoint for Huawei OBS (S3-compatible object storage).
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    val year = 2022
    // NOTE(review): 616 is not a calendar month — presumably a batch/partition id
    // used as the path segment. Confirm the intended value before running.
    val month = 616
    //    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/dws/${year}/${month}").createOrReplaceTempView("t1")
    //    spark.sql(
    //      """
    //        |select count(1),sum(sellcount),sum(salesamount) from t1
    //        |""".stripMargin).show(false)

    // Data pulled from Alibaba, tagged with Huawei category / address / brand.

    // registerTempTable is deprecated since Spark 2.0; createOrReplaceTempView
    // is the drop-in replacement with identical semantics.
    spark.read.orc(s"s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/es/${year}/${month}/").createOrReplaceTempView("t1")
    spark.sql(
      """
        |select count(1),sum(sellCount),sum(salesAmount) from t1
        |""".stripMargin).show(false)

    //        spark.read.orc(s"s3a://o2o-dataproces-group/zyf/livestreaming/kuaishou/dws_to_dws/${year}/${month}/").createOrReplaceTempView("t1")
    //        spark.sql(
    //          """
    //            |select count(1),sum(sellcount),sum(salesamount) from t1
    //            |""".stripMargin).show(false)

    //    spark.read.option("header", true).csv(s"D:\\o2o4zyfBywork\\需求\\快手直播\\导回dws\\${year}${month}").createOrReplaceTempView("t1")
    //    spark.sql(
    //      """
    //        |select count(1),sum(sellCount),sum(salesAmount) from t1
    //        |""".stripMargin).show(false)
    //    val timestamp: Array[Int] = Array(1643472000)
    //    println(timestamp(0))

    // Release the local SparkContext so the JVM can exit cleanly.
    spark.stop()
  }
}
