package LogServer.spark.sql

import LogServer.constants.PropertiesMongo
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SQLContext}

/**
  * Created by root on 17-2-14.
  */
object SqlContext {
  /**
    * Obtains the SQLContext associated with the given SparkContext.
    *
    * Uses SQLContext.getOrCreate so that repeated calls share one
    * SQLContext per SparkContext instead of allocating a fresh
    * instance on every invocation (the original behavior).
    *
    * @param sparkContext the active SparkContext
    * @return the singleton SQLContext bound to sparkContext
    */
  private def createContext(sparkContext: SparkContext): SQLContext =
    SQLContext.getOrCreate(sparkContext)

  /**
    * Loads the specified MongoDB collection into Spark-SQL.
    *
    * Convenience overload: derives an SQLContext from the SparkContext
    * and delegates to the SQLContext-based overload.
    *
    * @param sparkContext   the active SparkContext
    * @param collectionName name of the MongoDB collection to load
    * @return a DataFrame backed by the collection
    */
  def loadCollection(sparkContext: SparkContext, collectionName: String): DataFrame =
    loadCollection(createContext(sparkContext), collectionName)

  /**
    * Loads the specified MongoDB collection into Spark-SQL via the
    * Stratio spark-mongodb datasource. Connection settings (host,
    * database, credentials) are read from PropertiesMongo.
    *
    * @param sqlContext     the SQLContext used to perform the read
    * @param collectionName name of the MongoDB collection to load
    * @return a DataFrame backed by the collection
    */
  def loadCollection(sqlContext: SQLContext, collectionName: String): DataFrame = {
    val properties = PropertiesMongo.getAllProperties()
    // Stratio datasource expects credentials as "user,database,password".
    val options = Map(
      "host"        -> properties.mongoUrl,
      "database"    -> properties.mongoDbName,
      "collection"  -> collectionName,
      "credentials" -> Seq(properties.mongoUser, properties.mongoDbName, properties.mongoPwd)
        .mkString(",")
    )
    sqlContext.read
      .format("com.stratio.datasource.mongodb")
      .options(options)
      .load()
  }
}


