package com.o2o.cleaning.month.platform.ebusiness_plat.taobao

import com.alibaba.fastjson.{JSON, JSONObject}
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import com.mongodb.spark.rdd.MongoRDD
import com.o2o.utils.Iargs
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.bson.Document

import scala.util.control.NonFatal

/**
  * Exports the `jd_nanzhuang_detail_2011` MongoDB collection (JD data) to OBS as ORC.
  *
  * @author o2o-rd-0008
  * @since 2020/12/1 18:43
  */
object JdNanzhaung {

  /**
    * Entry point: loads every document of the `Jingdong.jd_nanzhuang_detail_2011`
    * MongoDB collection, removes the Mongo-internal `_id` field, and writes the
    * result as ORC to `s3a://o2o-dataproces-group/zsc/jd/test/2/`.
    *
    * @param args unused — all connection settings are currently hard-coded below.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName(s"${this.getClass.getSimpleName}")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()

    val sc = spark.sparkContext
    // S3A credentials/endpoint for the OBS bucket targeted by the ORC write below.
    sc.hadoopConfiguration.set("fs.s3a.access.key", Iargs.OBSACCESS)
    sc.hadoopConfiguration.set("fs.s3a.secret.key", Iargs.OBSSECRET)
    sc.hadoopConfiguration.set("fs.s3a.endpoint", Iargs.OBSENDPOINT)
    sc.setLogLevel("WARN")

    // FIX: the original URI had a stray space before the host
    // ("...!@ 192.168.0.203..."), which is an invalid connection string.
    // NOTE(review): credentials are hard-coded in source — move them to a
    // secrets store or job configuration (same pattern as Iargs above).
    val readUri = "mongodb://ob:O2Odata123!@192.168.0.203:27017/admin"
    val readDatabase = "Jingdong"
    val readCollection = "jd_nanzhuang_detail_2011"

    try {
      val docs: MongoRDD[Document] = MongoSpark.load(
        sc,
        ReadConfig(Map(
          "uri"        -> readUri,
          "database"   -> readDatabase,
          "collection" -> readCollection
        ))
      )

      // Strip the Mongo-internal `_id` so it does not become a column in the output.
      val jsonRdd: RDD[String] = docs.map { doc =>
        val obj: JSONObject = JSON.parseObject(doc.toJson())
        obj.remove("_id")
        obj.toString
      }

      // The DataFrame is consumed exactly once, so the original `.cache()`
      // only cost memory without any reuse benefit — dropped.
      val frame: DataFrame = spark.read.json(jsonRdd)
      frame.write.orc("s3a://o2o-dataproces-group/zsc/jd/test/2/")
    } catch {
      // Catch only non-fatal errors (OOM, interrupts, etc. still propagate)
      // and keep the full stack trace instead of a bare println.
      case NonFatal(e) => e.printStackTrace()
    } finally {
      // Always release cluster resources, even when the job fails.
      spark.stop()
    }
  }

}
