package com.cw.recommend.common.util

import com.cw.recommend.common.constant.{MONGODB_DATA_SOURCE, MONGODB_URI}
import com.mongodb.casbah.{MongoClient, MongoClientURI}
import org.apache.spark.sql.{DataFrame, SparkSession}

/** Utility object centralizing MongoDB access: Spark DataFrame read/write through
  * the configured MongoDB data source, plus a managed Casbah client helper.
  */
object MongoDBUtil {
  out =>

  /** Connection configuration for MongoDB: connection URI plus database name. */
  case class MongoDBConnConf(uri: String, db: String)

  /** Enrichment syntax so callers can write `df.sinkMongoDB("collection")`. */
  implicit class MongoDBSyntax(df: DataFrame) {

    /** Writes this DataFrame to `collectionName` (default save mode: "overwrite"). */
    def sinkMongoDB(collectionName: String, mode: String = "overwrite"): Unit =
      out.sinkMongoDB(df, collectionName, mode)
  }

  /** Persists `df` into a MongoDB collection via the configured data source.
    *
    * @param df             the DataFrame to write
    * @param collectionName target MongoDB collection name
    * @param mode           Spark save mode, e.g. "overwrite" or "append"
    */
  def sinkMongoDB(df: DataFrame, collectionName: String, mode: String): Unit =
    df.write
      .option("uri", MONGODB_URI)
      .option("collection", collectionName)
      .format(MONGODB_DATA_SOURCE)
      .mode(mode)
      .save()

  /** Loads a MongoDB collection as a DataFrame.
    *
    * @param spark          active SparkSession used to read
    * @param collectionName source MongoDB collection name
    * @return DataFrame backed by the collection's documents
    */
  def readMongoDB(spark: SparkSession, collectionName: String): DataFrame =
    spark.read
      .option("uri", MONGODB_URI)
      .option("collection", collectionName)
      .format(MONGODB_DATA_SOURCE)
      .load()

  /** Creates a new Casbah client for MONGODB_URI.
    * The caller owns the client and must close it; prefer [[clientContext]].
    */
  def getClient: MongoClient =
    MongoClient(MongoClientURI(MONGODB_URI))

  /** Runs `f` with a freshly created client, guaranteeing the client is closed
    * even when `f` throws (the previous version leaked the client on exception).
    *
    * @param f action to run against the client
    * @return the value produced by `f`
    */
  def clientContext[T](f: MongoClient => T): T = {
    val cli = getClient
    try f(cli)
    finally cli.close()
  }
}
