package com.o2o.cleaning.month.platform.ebusiness_plat.zaixianjy_yl.yiliao

import org.apache.spark.sql.SparkSession

/**
 * Ad-hoc data sanity check: reads the weiyi_whenzhen ORC export from
 * Huawei OBS (via the S3A connector) and prints the row count plus the
 * sums of `sellCount` and `salesAmount`.
 */
object CheckDataDetail {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("cluster.name", "O2OElastic")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // SECURITY: credentials were hard-coded in plaintext here. Prefer the
    // S3A_ACCESS_KEY / S3A_SECRET_KEY environment variables; fall back to the
    // original values so existing invocations keep working. These leaked keys
    // should be rotated and the fallbacks removed.
    val accessKey = sys.env.getOrElse("S3A_ACCESS_KEY", "GAO7EO9FWKPJ8WFCQDME")
    val secretKey = sys.env.getOrElse("S3A_SECRET_KEY", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.access.key", accessKey)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", secretKey)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Plain literal — the original used the `s` interpolator with nothing to interpolate.
    val result = "s3a://o2o-tempdata/zyf/2022/8/weiyi_whenzhen/"

    // registerTempTable was deprecated in Spark 2.0 and removed in Spark 3;
    // createOrReplaceTempView is the supported equivalent.
    spark.read.orc(result)
      .createOrReplaceTempView("t1")

    spark.sql(
      """
        |select count(1),sum(sellCount),sum(salesAmount) from t1
        |""".stripMargin)
      .show(false)

    // Release the local Spark context cleanly instead of relying on JVM exit.
    spark.stop()
  }
}