package a_o2odata_deal

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import org.apache.spark.sql.SparkSession

/**
 * One-shot ETL job: loads one MongoDB collection, strips the `_id` field from
 * every document, and writes the result as JSON to an OBS (S3A) path that is
 * partitioned by year/month/platform.
 *
 * Usage: edit `year`, `month`, `names` and `platform` below, then run locally
 * (`master = local[*]`). `names` must be in `database.collection` form.
 */
object MonGo_2_local_address {

  def main(args: Array[String]): Unit = {
    /** ******* IMPORTANT — update per run: year, month, platform, names ******* */

    //===============================================
    val year = "2022"
    val month = "11"
    val obs = "s3a://"

    // Dingdong Maicai
    //    val names = "Address.ddmc_address_all"
    //    val platform = "dingdongmc"

    // Rongyigou (ICBC)
    //    val names = "Address.icbc_address_2112"
    //    val platform = "icbc"

    // Miaojie (Intime)
    //    val names = "Address.intime_address_all"
    //    val platform = "intime"

    // Tuniu
    //    val names = "Address.tuniu_ticket_address_all"
    //    val platform = "tuniu"

    // Kuaishou
    //    val names = "Address.kuaishou_address_all"
    //    val platform = "kuaishou"

    // Pupu Supermarket
    val names = "Address.pupu_address_all"
    val platform = "PupuChaoShi"
    /** *******************************************************/

    // `names` is "database.collection"; split on the first dot only so a
    // collection name containing dots would still survive intact.
    val Array(database, collection) = names.split("\\.", 2)

    // Monthly-increment output path on OBS.
    val resultSave2OBS = obs + s"o2o-dimension-table/address_table/address_source_data_${year}/address_platform_newAdd/${year}_${month}/${platform}_address_${year}_${month}"

    /** ************* Do not modify the code below *******************************/
    // NOTE(review): credentials are hard-coded in the connection URI; they
    // should be moved to a config file / environment variable.
    // BUGFIX: the URI previously contained a stray space after '@', which
    // corrupted the host part of the MongoDB connection string.
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("MonGo_To_local")
      .config("spark.mongodb.input.uri", "mongodb://root:O2Odata123!@192.168.6.83:27017/admin")
      .config("spark.mongodb.input.database", s"${database}")
      .config("spark.mongodb.input.collection", s"${collection}")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // NOTE(review): hard-coded OBS access keys — move to secure configuration.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("WARN")

    // Load the configured MongoDB collection as an RDD of BSON documents.
    val rdd = MongoSpark.load(sc)
    // Drop the Mongo-internal `_id` field so the exported JSON contains only
    // the business payload.
    val values = rdd.map { line =>
      val doc: JSONObject = JSON.parseObject(line.toJson())
      doc.remove("_id")
      doc.toString
    }

    // Re-parse as a DataFrame (schema inferred) and overwrite the target path.
    spark.read.json(values).write.mode("overwrite").json(resultSave2OBS)

    sc.stop()

  }
}

