package com.etc.offine

import com.etc.offine.OfflineRecommender.MONGODB_RATING_COLLECTION
import org.apache.spark.sql.SparkSession

/**
 * Ad-hoc driver that reads the rating collection from MongoDB through the
 * Spark-Mongo connector and prints every rating record to stdout.
 *
 * Fixes vs. previous version:
 *  - `ratingRDD.map { println }` was a lazy transformation with no action,
 *    so nothing was ever executed or printed; replaced with the `foreach`
 *    action, which actually triggers the job.
 *  - Removed the duplicated `println` inside the lambda.
 *  - "mongo.db" was "reommender", inconsistent with the database named in
 *    the URI ("recommender"); corrected the typo.
 *  - The SparkSession is now stopped so the driver shuts down cleanly.
 */
object Test {
  def main(args: Array[String]): Unit = {
    val config = Map(
      "spark.cores" -> "local[*]",
      "mongo.uri" -> "mongodb://127.0.0.1:27017/recommender",
      // must match the database in mongo.uri
      "mongo.db" -> "recommender"
    )

    val spark = SparkSession.builder()
      .master(config("spark.cores"))
      .appName("StatisticsRecommender")
      .getOrCreate()

    // Connection settings for the Spark-Mongo connector
    val mongoConfig = MongoConfig(config("mongo.uri"), config("mongo.db"))

    import spark.implicits._

    // Load the rating collection from MongoDB as an RDD of MovieRating
    val ratingRDD = spark
      .read
      .option("uri", mongoConfig.uri)
      .option("collection", MONGODB_RATING_COLLECTION)
      .format("com.mongodb.spark.sql")
      .load()
      .as[MovieRating]
      .rdd

    // `foreach` is an action: it forces evaluation and runs the side effect.
    // Note: println executes on the executors; with local[*] it still
    // appears in this process's stdout.
    ratingRDD.foreach(rating => println(rating))

    spark.stop()
  }

}
