import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  * Description: Sanity-checks the monthly DZDP "good" data stored on OBS
  * by printing row counts, distinct-shop counts and sales sums via Spark SQL.
  */
object CheckDZDPObsData {

  /**
    * Entry point. Reads this month's and last month's DZDP "good" JSON dumps
    * from OBS (via the S3A connector) and prints a series of aggregate counts
    * (rows, distinct shops, sales sums, per-category breakdown) so the data
    * load can be eyeballed for completeness.
    *
    * @param args unused; month/year/platform are currently hard-coded below.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name","O2OElastic")
      // SECURITY: hard-coded Elasticsearch credentials checked into source.
      // Move these to a config file / secret store and rotate the password.
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3-compatible) credentials come from the shared Iargs holder.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Job parameters — immutable (`val`, not `var`: nothing reassigns them).
    val year = "2020"
    val month = "8"
    val platform = "dzdp"

    // "good" JSON dumps for this month and the previous one.
    // NOTE(review): `month.toInt - 1` yields 0 when month is "1", producing a
    // nonexistent path across a year boundary — confirm this job never runs
    // for January, or compute the previous year/month pair properly.
    val goodPath = s"s3a://o2o-dataproces-group/zsc/${year}/${month}/${platform}/good/"
    val lastgoodPath = s"s3a://o2o-dataproces-group/zsc/${year}/${month.toInt -1}/${platform}/good/"

    // createOrReplaceTempView replaces the deprecated registerTempTable
    // (removed in Spark 3.x); identical semantics for this use.
    spark.read.json(goodPath).createOrReplaceTempView("tab")
    spark.read.json(lastgoodPath).createOrReplaceTempView("lasttab")

    // Overall volume check: current month vs previous month.
    println("本月")
    spark.sql(
      """
        |
        |select
        |count(1) ct,
        |count(distinct shopId),
        |sum(salesAmount),
        |sum(sellCount)
        |from
        |tab
      """.stripMargin).show()
    println("上月")
    spark.sql(
      """
        |
        |select
        |count(1) ct,
        |count(distinct shopId),
        |sum(salesAmount),
        |sum(sellCount)
        |from
        |lasttab
      """.stripMargin).show()

    // Data-quality check: rows whose province failed to resolve ('0').
    println("省为0的条数")
    spark.sql(
      """
        |
        |select
        |count(1) ct
        |from
        |tab
        |where province='0'
      """.stripMargin).show()

    // Category 10028 (food) volume, then the full per-category breakdown.
    println("10028美食分类的量")
    spark.sql(
      """
        |
        |select
        |count(1) ct,
        |sum(salesAmount),
        |sum(sellCount)
        |from
        |tab
        |where firstCategoryId='10028'
      """.stripMargin).show()
    println("所有分类的量")
    spark.sql(
      """
        |
        |select
        |count(1) ct,
        |sum(salesAmount),
        |sum(sellCount),
        |firstCategoryId
        |from
        |tab
        |group by firstCategoryId
        |order by ct desc
      """.stripMargin).show()

    sc.stop()
  }
}
