package com.fudian.spark_platform.DBConnector

import com.fudian.spark_platform.Configure.{AppSettingConfig, DataSourceInputConfig}
import com.mongodb.spark.MongoSpark
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{SparkSession, DataFrame}

/**
  * Reads a MongoDB collection into Spark.
  *
  * Connection parameters come from the data-source input config; the Spark
  * application name and master come from the app settings. Call [[init]]
  * (directly or via [[getMongoLoad]]) before using [[getSparkInc]].
  *
  * NOTE(review): fields are public `var`s to preserve the existing external
  * interface — callers may be reassigning them before `init()`.
  *
  * @param a application-level settings (app name, Spark master)
  * @param c data-source input configuration (host, port, db, collection, URI)
  */
class MongoConnector(a: AppSettingConfig, c: DataSourceInputConfig) {

    // Raw connection parameters; `toString` because inputConfig values are untyped.
    var MongoHost: String = c.inputConfig("DataSourceHost").toString
    var MongoPort: String = c.inputConfig("DataSourcePort").toString
    var MongoDatabase: String = c.inputConfig("DataSourceDBName").toString
    var MongoCollection: String = c.inputConfig("DataSourceDBTable").toString
    // Full URI; when empty, init() assembles it from the fields above.
    var MongodbURI: String = c.inputConfig("DataSourceDBUri").toString

    // Set by init(); remains null until then.
    var spark: SparkSession = null

    var appName: String = a.settings("AppName")
    var masterSpark: String = a.settings("SparkMaster")

    /**
      * Assembles the MongoDB URI (if not supplied explicitly) and builds the
      * SparkSession configured for MongoDB input/output.
      *
      * Safe to call more than once: `getOrCreate` reuses any existing session,
      * and the URI is only assembled when it is still empty.
      */
    def init(): Unit = {
        // Assemble the MongoDB URI from host/port/db/collection when none was given.
        if (this.MongodbURI == "") {
            this.MongodbURI =
                s"mongodb://${this.MongoHost}:${this.MongoPort}/${this.MongoDatabase}.${this.MongoCollection}"
        }
        // Build the Spark session with MongoDB connector URIs.
        // NOTE(review): output URI is the input URI with an "_Out" suffix appended
        // to the collection name — confirm this naming is intended downstream.
        this.spark = SparkSession.builder()
            .appName(this.appName)
            .master(this.masterSpark)
            .config("spark.mongodb.input.uri", this.MongodbURI)
            .config("spark.mongodb.output.uri", this.MongodbURI + "_Out")
            .getOrCreate()
    }

    /**
      * Loads the configured MongoDB collection.
      *
      * Side effects: raises the root log4j level to ERROR and initializes the
      * SparkSession via [[init]].
      *
      * @return the collection contents as a DataFrame (via MongoSpark.load)
      */
    def getMongoLoad(): DataFrame = {
        Logger.getRootLogger.setLevel(Level.ERROR)
        init()
        MongoSpark.load(this.spark)
    }

    /**
      * Accessor for the SparkSession built by [[init]].
      *
      * NOTE(review): returns null if init()/getMongoLoad() has not run yet —
      * callers must initialize first.
      */
    def getSparkInc: SparkSession = this.spark

}

