package a_o2odata_deal

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.o2o.utils.Iargs
import org.apache.spark.sql.SparkSession

/**
 * One-shot export job: reads a MongoDB address collection and writes it to
 * OBS (S3-compatible storage) as a single text file of JSON records, with the
 * Mongo-internal `_id` field stripped from each document.
 */
object MongoData2OBS {

  def main(args: Array[String]): Unit = {

    /** IMPORTANT — adjust per run: `year`, `month`, `platform`, `names`.
      *
      * `names` is "&lt;database&gt;.&lt;collection&gt;" in MongoDB; `platform` becomes part
      * of the output path. Representative values used in past runs:
      *   names = "Address.tmall_address_all"        platform = "tmall"
      *   names = "Address.hmxs_address_1902"        platform = "hmxs"
      *   names = "Address.jd_address_1903"          platform = "jd"
      *   names = "Address.suning_address_1902"      platform = "suning"
      *   names = "Address.dzdp_address_all"         platform = "dzdp"
      *   names = "Address.meituan_tg_address_1901"  platform = "meituan_tg"
      *   names = "Address.eleme_address_1907"       platform = "elm"
      */
    val names = "Address.tuniu_ticket_address_2008"
    val platform = "tuniu"

    val year = 2020
    val month = 8
    val obs = "s3a://"

    // "<database>.<collection>" -> (database, collection). Split on the FIRST
    // '.' only so a collection name containing dots stays intact; fail fast
    // with a clear message on a malformed value instead of an index error.
    val parts = names.split("\\.", 2)
    require(parts.length == 2 && parts.forall(_.nonEmpty),
      s"names must be '<database>.<collection>', got: $names")
    val database = parts(0)
    val collection = parts(1)

    // Monthly-increment output path on OBS.
    val resultSave2OBS = obs +
      s"o2o-dimension-table/address_table/address_source_data_${year}/address_platform_newAdd/${year}_${month}/${platform}_address_${year}_${month}"

    /** ********* Fixed plumbing below — normally no changes needed ********* */
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MonGo_To_local")
      // BUGFIX: the original URI had a stray space after '@'
      // ("...O2Odata123!@ 192.168.0.149..."), which is invalid in a MongoDB
      // connection string and would break connecting.
      // NOTE(review): credentials are hard-coded in source; consider moving
      // them to configuration or environment variables.
      .config("spark.mongodb.input.uri", "mongodb://root:O2Odata123!@192.168.0.149:27017/admin")
      .config("spark.mongodb.input.database", database)
      .config("spark.mongodb.input.collection", collection)
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // OBS (S3-compatible) credentials for the s3a:// output path.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // Load the configured MongoDB collection, drop the Mongo-internal `_id`
    // from every document, and keep each record as a JSON string.
    val rdd = MongoSpark.load(sc)
    val values = rdd.map { doc =>
      val json: JSONObject = JSON.parseObject(doc.toJson())
      json.remove("_id")
      json.toString
    }

    // repartition(1): emit a single output file per monthly export.
    values.repartition(1).saveAsTextFile(resultSave2OBS)

    sc.stop()
  }
}

