package com.o2o.cleaning.month.platform.ebusiness_plat.kuaishou

import java.text.SimpleDateFormat
import java.time.LocalDate
import java.util.{Calendar, Date}

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import com.mongodb.spark.rdd.MongoRDD
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.bson.Document

/**
 * Spark job: dumps last month's Kuaishou address collection from MongoDB to
 * S3 (Huawei OBS, s3a protocol) as JSON, repartitioned into 5 files.
 */
object Address_Mongo_to_local {

  // The job exports the PREVIOUS month's collection. minusMonths(1) handles
  // the January -> December rollover (including the year change) correctly;
  // the former `getMonthValue - 1` produced month 0 in January and paired it
  // with the wrong (current) year.
  private val time: LocalDate = LocalDate.now().minusMonths(1)
  val year = time.getYear
  val month = time.getMonthValue
  val database = "Address"
  //  val collection = "kuaishou_address_all"
  // e.g. "kuaishou_address_2105" for May 2021. Two-digit year and zero-padded
  // month are derived, not hard-coded, so the job keeps working after 2021.
  val collection = f"kuaishou_address_${year % 100}%02d${month}%02d"
  val source_path = s"s3a://o2o-dimension-table/address_table/address_source_data_${year}/address_platform_newAdd/${year}_${month}/kuaishou_address_${year}_${month}/"

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Kuaishou_webcast")
      .config("spark.debug.maxToStringFields", "2000")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.sql.caseSensitive", "true")
      //            .master("local[*]")
      .getOrCreate()

    val sc = spark.sparkContext
    // NOTE(security): credentials are hard-coded in source; move them to a
    // secrets store / job configuration and rotate the exposed keys.
    sc.hadoopConfiguration.set("fs.s3a.access.key", "GAO7EO9FWKPJ8WFCQDME")
    sc.hadoopConfiguration.set("fs.s3a.secret.key", "LZ0xaHBSYKHaJ9ECDbX9f7zin79UZkXfGoNapRPL")
    sc.hadoopConfiguration.set("fs.s3a.endpoint", "https://obs.cn-north-1.myhuaweicloud.com")
    sc.setLogLevel("ERROR")

    // MongoDB read configuration. The stray space after '@' in the original
    // URI ("...!@ 192.168...") made the connection string invalid; removed.
    // NOTE(security): credentials are hard-coded here as well — externalize.
    val readConfig = ReadConfig(Map(
      "spark.mongodb.input.uri" -> "mongodb://ob:O2Odata123!@192.168.0.149:27017/admin"
      , "spark.mongodb.input.database" -> s"${database}"
      , "spark.mongodb.input.collection" -> s"${collection}"))

    // Load the collection as an RDD of BSON documents.
    val rdd: MongoRDD[Document] = MongoSpark.load(spark.sparkContext, readConfig)

    // Strip the Mongo-internal "_id" field from every document before export.
    val values: RDD[String] = rdd.map(line => {
      val nObject: JSONObject = JSON.parseObject(line.toJson())
      nObject.remove("_id")
      nObject.toString
    })

    //    values.repartition(1).saveAsTextFile(source_path)
    // Round-trip through spark.read.json so the output is schema-normalized
    // JSON rather than raw document strings.
    spark.read.json(values).repartition(5).write.json(source_path)
  }

}
