package com.fudian.spark_platform.DBConnector
import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.{WriteConfig, ReadConfig}
import com.mongodb.spark.rdd.MongoRDD
import org.apache.log4j.{Level, Logger}
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.rdd.RDD
import org.bson.Document

class MongoConnector(s : MongoConnector) extends DBBase {

    // Default connection settings.
    // NOTE(review): hard-coded internal host/port/database defaults — confirm
    // these still point at a valid deployment before relying on the zero-arg path.
    var MongoHost: String = "175.102.18.112"
    var MongoPort: scala.Int = 27018
    var MongoDatabase: String = "OSOpenCollege"
    var MongoCollection: String = "ICDE"
    var MongodbURI: String = ""

    /**
      * Zero-argument constructor: delegates to the primary constructor and
      * immediately applies the default connection settings to the inherited
      * Spark configuration via [[init]].
      */
    def this() = {
        this(null)
        init()
    }

    /** Sets the MongoDB host. Call [[init]] afterwards to rebuild the URI. */
    def setMongoHost(value: String): Unit = {
        this.MongoHost = value
    }

    /** Sets the MongoDB port. Call [[init]] afterwards to rebuild the URI. */
    def setMongoPort(value: scala.Int): Unit = {
        this.MongoPort = value
    }

    /** Sets the MongoDB database name. Call [[init]] afterwards to rebuild the URI. */
    def setMongoDBName(name: String): Unit = {
        this.MongoDatabase = name
    }

    /** Sets the MongoDB collection name. Call [[init]] afterwards to rebuild the URI. */
    def setMongoCollection(collection: String): Unit = {
        this.MongoCollection = collection
    }

    /**
      * Builds the MongoDB connection URI from the given settings and registers
      * it on the shared Spark configuration (`this.conf`, inherited from DBBase)
      * as both the input and output URI.
      *
      * @param mongoHost       host name or IP address
      * @param mongoPort       port number
      * @param mongoDatabase   database name
      * @param mongoCollection collection name
      */
    def init(mongoHost: String = this.MongoHost,
             mongoPort: scala.Int = this.MongoPort,
             mongoDatabase: String = this.MongoDatabase,
             mongoCollection: String = this.MongoCollection): Unit = {
        // mongodb://host:port/database.collection
        this.MongodbURI = s"mongodb://$mongoHost:$mongoPort/$mongoDatabase.$mongoCollection"
        // Writes are directed at a sibling collection suffixed "_out" so reads
        // and writes never target the same collection.
        this.conf = this.conf.set("spark.mongodb.input.uri", MongodbURI)
            .set("spark.mongodb.output.uri", MongodbURI + "_out")
    }


    /**
      * Loads the configured MongoDB collection into Spark.
      *
      * NOTE(review): this mutates the global root log4j level to ERROR as a
      * side effect — confirm that silencing all other logging is intended.
      *
      * @return an RDD over the collection's BSON documents
      */
    def getMongoLoad(): MongoRDD[Document] = {
        Logger.getRootLogger.setLevel(Level.ERROR)
        // Prefer secondary replicas so analytical reads stay off the primary;
        // falls back to the input URI already set on the SparkContext's conf.
        val readConfig = ReadConfig(Map("readPreference.name" -> "secondaryPreferred"), Some(ReadConfig(this.sc)))
        MongoSpark.load(this.sc, readConfig = readConfig)
    }

//    def writeToDB(rDD: RDD[Document]): Unit = {
//        MongoSpark.save(rDD)
//    }


}

