package com.o2o.cleaning.month.platform.ebusiness_plat.mt

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.o2o.utils.Iargs
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
  * @author o2o-rd-0008
  * @since  2020/6/5 16:23
  * Description: Sanity-check Meituan takeaway data loaded from MongoDB by
  * aggregating sell counts and revenue per shop.
  */
object CheckMTData {

  /**
    * Entry point: loads the `meituan_waimai_detail_2008_bu` collection from
    * MongoDB, strips the Mongo `_id` field, re-reads the documents as a
    * Spark DataFrame, and prints per-shop aggregates (total sell count and
    * total revenue) to stdout for manual verification.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      // SECURITY NOTE(review): credentials are hard-coded; move them into external
      // configuration or a secrets store before this runs anywhere shared.
      // Bug fix: the original URI had a stray space after '@' ("@ 192.168.0.56"),
      // which makes the host portion of the connection string invalid.
      .config("spark.mongodb.input.uri", "mongodb://ob:O2Odata123!@192.168.0.56:27017/admin")
      .config("spark.mongodb.input.database", "MT")
      .config("spark.mongodb.input.collection", "meituan_waimai_detail_2008_bu")
      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A credentials for reading/writing object storage (OBS endpoint).
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Load raw BSON documents from the configured Mongo collection.
    val rdd = MongoSpark.load(sc)

    // Convert each document to a JSON string, dropping the Mongo-internal
    // "_id" field so it does not pollute the inferred DataFrame schema.
    val jsonLines: RDD[String] = rdd.map { document =>
      val obj: JSONObject = JSON.parseObject(document.toJson())
      obj.remove("_id")
      obj.toString
    }

    val frame: DataFrame = spark.read.json(jsonLines)

    // registerTempTable is deprecated since Spark 2.0; use the replacement API.
    frame.createOrReplaceTempView("tab")

    // NOTE(review): sellCount * priceText relies on implicit string-to-number
    // casting (priceText looks like a text column) — confirm the column always
    // holds a parseable numeric value, otherwise the product is NULL.
    spark.sql(
      """
        |select
        |sum(sellCount) as total_sell_count,
        |sum(sellCount*priceText) as total_revenue,
        |shopId,
        |shopName,
        |province,
        |city,
        |county,
        |regional_id,
        |address
        |from
        |tab
        |group by shopId,shopName,province,city,county,regional_id,address
      """.stripMargin).show(false)

    // spark.stop() also stops the underlying SparkContext.
    spark.stop()
  }
}
