package com.o2o.cleaning.month.platform.ebusiness_plat.alibaba

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
  * @ Author: o2o-rd-0008
  * @ Date:   2019/3/22 16:42
  * @ Param:  $param$
  * @ Description: 旅游类数据拉取 (travel-category data pull)
  *   NOTE(review): the description says "travel-category" but this job pulls
  *   Alibaba e-business platform data — confirm/update the description.
  */
object MongoDBData {

  /**
    * Monthly pull of Alibaba platform data out of MongoDB into S3/OBS:
    *   1. detail documents  -> s3a://o2o-sourcedata/obs-source-&lt;year&gt;/&lt;month&gt;/Alibaba/...
    *   2. address documents -> s3a://o2o-dimension-table/address_table/...
    * Each document has its volatile `_id` stripped; detail records are additionally
    * tagged with the pull timestamp. Year/month/timestamp come from `Iargs`.
    */
  def main(args: Array[String]): Unit = {
    // Kryo serialization and case-sensitive SQL match the other platform-pull jobs.
    val spark = SparkSession.builder()
      .appName(this.getClass.getSimpleName)
      .config("spark.debug.maxToStringFields", "500")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
//      .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A credentials for the OBS object store (shared Iargs configuration).
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    /** **********       IMPORTANT: readUri / writeUri       ****************/
    // SECURITY(review): credentials are hard-coded in source; move them into Iargs
    // or a secret store. Also note the space after '@' and the un-encoded '!' in the
    // password — a literal space is not valid in a MongoDB connection string, so
    // confirm this URI actually connects as written before touching it.
    val readUri = "mongodb://root:O2Odata123!@ 192.168.0.149:27017/admin"
    //    val readUri = "mongodb://root:O2Odata123!@ 192.168.0.58:27017/admin"

    val year = Iargs.YEAR
    val month = Iargs.MONTH
    val timeStamp = Iargs.TIMESAMP

    val obs = "s3a://"
    val platName = "Alibaba"

    // Output path for the monthly detail pull, e.g.
    // s3a://o2o-sourcedata/obs-source-2020/3/Alibaba/Alibaba_2020_3
    val resultUrl = obs + s"o2o-sourcedata/obs-source-${year}/${month}/${platName}/${platName}_${year}_${month}"

    /**
      * Loads one MongoDB collection via the Spark connector and strips the
      * volatile `_id` field from every document, yielding an RDD of JSONObject.
      * Closes over `sc` and `readUri` from the enclosing scope.
      */
    def loadWithoutId(database: String, collection: String) =
      MongoSpark.load(sc, ReadConfig(Map(
        "uri" -> readUri,
        "database" -> database,
        "collection" -> collection
      ))).map { doc =>
        val json: JSONObject = JSON.parseObject(doc.toJson())
        json.remove("_id")
        json
      }

    // -------------------- monthly detail documents --------------------
    // NOTE(review): the "200" prefix yields e.g. "alibaba_detail_2003" for month "3" —
    // presumably a (truncated?) year prefix; verify against the actual collection names.
    val values = loadWithoutId("Alibaba", s"alibaba_detail_200${month}").map { json =>
      json.put("timeStamp", timeStamp) // tag every record with the pull timestamp
      json.toString
    }

    println(s"-----------阿里巴巴${month}数据拉取--------------")
    values.repartition(5).saveAsTextFile(resultUrl)
    println("---------------------end---------------------")

    // -------------------- address pull --------------------
    val address_values = loadWithoutId("Address", "alibaba_address_all").map(_.toString)

    // NOTE(review): year "2020" is hard-coded here while resultUrl uses ${year};
    // confirm whether this path should also be driven by Iargs.YEAR.
    val addressUrl = s"s3a://o2o-dimension-table/address_table/address_source_data_2020/address_platform_newAdd/2020_${month}/${platName.toLowerCase}_address_2020_${month}"
    address_values.repartition(1).saveAsTextFile(addressUrl)

    sc.stop()
  }

}
