package a_aa_amainpackage.lvyou_clear.lv_update

import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.spark._

/**
 * Ad-hoc inspection job: reads a month of tuniu JSON result data from OBS (via
 * the s3a connector) and prints the first rows, so the output of an upstream
 * crawl/clean step can be eyeballed quickly.
 *
 * Usage: CheckDataDetail [year] [month]
 *   - year/month default to 2022/6 when not supplied, preserving the original
 *     hard-coded behavior.
 */
object CheckDataDetail {

  def main(args: Array[String]): Unit = {

    // Allow the target partition to be chosen from the command line while
    // keeping the previous defaults for zero-arg runs.
    val year  = if (args.length > 0) args(0).toInt else 2022
    val month = if (args.length > 1) args(1).toInt else 6

    val spark = SparkSession.builder()
      .appName("CheckDataDetail")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name", "O2OElastic")
      // SECURITY NOTE(review): Elasticsearch credentials are hard-coded in
      // source. They should be rotated and moved to spark-submit --conf /
      // environment configuration.
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // SECURITY NOTE(review): OBS access/secret keys are committed in plain
    // text; rotate them and load from a credentials provider instead.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    try {
      // Currently inspecting the tuniu_mp dataset; the commented alternatives
      // below are other partitions of the same pipeline output.
      //    val result = s"s3a://o2o-sourcedata/obs_result_data/2021/11/"
      //    val result = s"s3a://o2o-dataproces-group/shi_xinwang/tourism/${year}/${month}/tuniu_ly/"
      val result = s"s3a://o2o-dataproces-group/shi_xinwang/tourism/${year}/${month}/tuniu_mp/"
      //    val result = s"s3a://o2o-dataproces-group/shi_xinwang/tourism/${year}/${month}/tuniu_yl/"

      //    spark.read.json(result).printSchema()
      spark.read.json(result).show(10)
      //      .registerTempTable("t1")
      //    spark.sql(
      //      """
      //        |select count(1),sum(sellCount),sum(salesAmount) from t1
      //        |""".stripMargin).show(false)
      //    spark.sql(
      //      """
      //        |select count(*),sum(sellCount),sum(salesAmount),sum(salesAmount_origin),sum(salesAmount_old) from t1
      //        |""".stripMargin).show(false)
      //    spark.sql(
      //      """
      //        |select good_id,count(1) from t1 group by good_id having count(1) > 1
      //        |""".stripMargin).show(false)
      //    spark.sql(
      //      """
      //        |select good_id,sellCount,salesAmount,title from t1 order by salesAmount desc
      //        |""".stripMargin).show(false)
    } finally {
      // Original code leaked the session; always release Spark resources,
      // even if the S3 read fails.
      spark.stop()
    }
  }
}
