import com.o2o.utils.Iargs
import com.o2o.utils.times.TimesYearAll
import org.apache.spark.sql.SparkSession

/**
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  *
  * Ad-hoc validation of platform goods data stored on OBS: loads an ORC
  * dataset via the s3a connector and runs Spark SQL check queries against it.
  */
object CheckELMObsData {

  /**
    * Entry point: builds a local SparkSession, wires OBS (S3-compatible)
    * access, loads a goods ORC dataset and runs ad-hoc validation queries.
    * Intended for manual, local data checking — not a production job.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      .config("es.nodes", "192.168.1.29")
      .config("es.port", "9200")
      .config("cluster.name","O2OElastic")
      // SECURITY(review): hard-coded Elasticsearch credentials — move to
      // configuration / environment before this code is shared or committed.
      .config("es.net.http.auth.user", "elastic")
      .config("es.net.http.auth.pass", "changeme")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS access through the Hadoop s3a connector; credentials and endpoint
    // come from the project-level Iargs configuration holder.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Platform name (never reassigned — vals, not vars)
    val platform = "dingdongmc"
    // Current month and previous month
    val month = 8
    val lastMonth = 7
    // Fixed per-month timestamp
    val year = 2020
    val timeStamp = TimesYearAll.TIME202008

    // MongoDB source-data path (kept for reference; not read below)
    val sourcePath = s"s3a://o2o-sourcedata/obs-source-${year}/${month}/${platform}/${platform}_${year}_${month}/"

    // Elasticsearch index/type (kept for reference; not queried below)
    val index = "2020_dingdongmc_7/type_1"

    // ORC dataset under check
    val goodPath = "s3a://dws-data/g_data/2020/8/dazhongdp/"

    // registerTempTable is deprecated since Spark 2.0; createOrReplaceTempView
    // is the equivalent, non-deprecated call.
    spark.read.orc(goodPath).createOrReplaceTempView("tab")

    /*spark.sql(
      """
        |select
        |count(1),
        |count(distinct shopId),
        |sum(sellCount),
        |sum(salesAmount)
        |from
        |tab
      """.stripMargin).show(false)

    spark.sql(
      """
        |select
        |count(1)
        |from
        |tab
        |where firstCategoryId='10099'
      """.stripMargin).show(false)

    spark.sql(
      """
        |select
        |town,
        |count(1)
        |from
        |tab
        |group by town
        |order by ct desc
      """.stripMargin).show(false)

    spark.sql(
      """
        |select
        |aedzId,
        |count(1) ct
        |from
        |tab
        |where city in ('苏州市','湛江市','武汉市','锡林郭勒盟','威海市','宿迁市','烟台市')
        |group by aedzId
        |order by ct desc
      """.stripMargin).show(false)*/

    // List the distinct timeStamp values present in the dataset.
    spark.sql(
      """
        |select
        |timeStamp
        |from
        |tab
        |group by timeStamp
      """.stripMargin).show(false)

    // Stop the whole session (this also stops the underlying SparkContext).
    spark.stop()
  }
}
