import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

// Central holder for configuration values read from recommender.properties.
object Constant {
  // Every value below is resolved via Properties.getProperties(key),
  // which reads the key from recommender.properties.

  // Names of the three source MongoDB collections.
  val MOVIE_COLLECTION = Properties.getProperties("movie.collection")
  val RATING_COLLECTION = Properties.getProperties("rating.collection")
  val TAG_COLLECTION = Properties.getProperties("tag.collection")

  // Local file paths of the three source data sets.
  val MOVIE_PATH = Properties.getProperties("movie.path")
  val RATING_PATH = Properties.getProperties("rating.path")
  val TAG_PATH = Properties.getProperties("tag.path")

  // Target collections for the four Spark SQL statistics results.
  val RATE_MORE_MOVIES = Properties.getProperties("RATE_MORE_MOVIES")
  val RATE_MORE_RECENTLY_MOVIES = Properties.getProperties("RATE_MORE_RECENTLY_MOVIES")
  val AVERAGE_MOVIES = Properties.getProperties("AVERAGE_MOVIES")
  val GENRES_TOP_MOVIES = Properties.getProperties("GENRES_TOP_MOVIES")

  // Spark master URL (e.g. "local[*]") and MongoDB connection settings.
  val SPARK_CORES = Properties.getProperties("spark.cores")
  val MONGO_URL = Properties.getProperties("mongo.uri")
  // NOTE(review): MONGO_DB is loaded but never passed to the writer below;
  // the target database is presumably embedded in MONGO_URL — confirm.
  val MONGO_DB = Properties.getProperties("mongo.db")

  // Target collections for offline recommendation results.
  // USER_RECS=UserRecs
  val USER_RECS = Properties.getProperties("USER_RECS")
  // MOVIE_RECS=MovieRecs
  val MOVIE_RECS = Properties.getProperties("MOVIE_RECS")
  // STREAM_RECS_COLLECTION=StreamRecs

  /**
   * Builds (or reuses) a SparkSession whose master is taken from SPARK_CORES.
   *
   * @param appName Spark application name; defaults to "DataLoader" so existing
   *                no-argument callers keep their original behavior.
   * @return the active SparkSession
   */
  def initEnv(appName: String = "DataLoader"): SparkSession = {
    val sparkConf: SparkConf = new SparkConf().setAppName(appName).setMaster(SPARK_CORES)
    SparkSession.builder().config(sparkConf).getOrCreate()
  }

  /**
   * Persists a DataFrame into the given MongoDB collection, replacing any
   * existing contents (SaveMode "overwrite").
   *
   * NOTE(review): the target database comes from MONGO_URL only; MONGO_DB is
   * not set explicitly here — verify the URI already names the database.
   *
   * @param df             data to write
   * @param collectionName target MongoDB collection name
   */
  def DataIntoMongoDB(df: DataFrame, collectionName: String): Unit = {
    df.write
      .option("uri", MONGO_URL)
      .option("collection", collectionName)
      .mode("overwrite")
      .format("com.mongodb.spark.sql")
      .save()
  }
}
