package com.ywj.recommender

import com.mongodb.casbah.commons.MongoDBObject
import com.mongodb.{MongoClientURI, casbah}
import com.mongodb.casbah.{MongoClient, MongoClientURI}
import org.apache.spark.SparkConf
import org.apache.spark.sql.{DataFrame, SparkSession}

// Case classes modeling the raw data sets.
/**
 * Movie data set. One record per movie, with '^'-separated fields, e.g.:
 *
 * 260                                         movie ID, mid
 * Star Wars: Episode IV - A New Hope (1977)   movie title, name
 * Princess Leia is captured and held hostage  description, descri
 * 121 minutes                                 runtime, timelong
 * September 21, 2004                          release date, issue
 * 1977                                        production year, shoot
 * English                                     language
 * Action|Adventure|Sci-Fi                     genres ('|'-separated)
 * Mark Hamill|Harrison Ford|Carrie Fisher     cast, actors ('|'-separated)
 * George Lucas                                directors
 *
 */
case class Movie(mid: Int, name: String, descri: String, timelong: String, issue: String,
                 shoot: String, language: String, genres: String, actors: String, directors: String)

/**
 * Rating data set. One comma-separated record per rating, e.g.:
 *
 * 1,31,2.5,1260759144  user ID uid; movie ID mid; rating score; epoch-seconds timestamp
 */
case class Rating(uid: Int, mid: Int, score: Double, timestamp: Int )

/**
 * Tag data set. One comma-separated record per user-applied tag, e.g.:
 *
 * 15,1955,dentist,1193435061   user ID uid; movie ID mid; tag text; epoch-seconds timestamp
 */
case class Tag(uid: Int, mid: Int, tag: String, timestamp: Int)

// Wrap the MongoDB and ES connection settings in case classes.
/**
 * MongoDB connection settings.
 *
 * @param uri MongoDB connection URI
 * @param db  target database name
 */
case class MongoConfig(uri:String, db:String)

/**
 * Elasticsearch connection settings.
 *
 * @param httpHosts       comma-separated HTTP hosts to connect to (usually port 9200)
 * @param transportHosts  comma-separated transport hosts (usually port 9300, used for
 *                        intra-cluster data transfer)
 * @param index           index to operate on
 * @param clustername     cluster name; defaults to "elasticsearch"
 */
case class ESConfig(httpHosts:String, transportHosts:String, index:String, clustername:String)

// Singleton entry point.
/**
 * Batch loader: reads the movie/rating/tag CSV files from local disk and
 * rewrites them into MongoDB, then recreates the query indexes.
 *
 * NOTE(review): the identifiers `DataLoder` and `MOVIE_DADA_PATH` are
 * misspelled, but both are public members of this object; renaming them would
 * break any external callers, so they are kept as-is and only flagged here.
 */
object DataLoder {
  // Local paths of the raw CSV data sets.
  val MOVIE_DADA_PATH="D:\\myjava\\MovieRecommend\\Recommender\\DataLoader\\src\\main\\resources\\movies.csv"
  val RATING_DATA_PATH="D:\\myjava\\MovieRecommend\\Recommender\\DataLoader\\src\\main\\resources\\ratings.csv"
  val TAG_DATA_PATH="D:\\myjava\\MovieRecommend\\Recommender\\DataLoader\\src\\main\\resources\\tags.csv"

  // Target MongoDB collection names.
  val MONGODB_MOVIE_COLLECTION = "Movie"
  val MONGODB_RATING_COLLECTION = "Rating"
  val MONGODB_TAG_COLLECTION = "Tag"

  def main(args: Array[String]): Unit = {

    val config = Map(
      "spark.cores" -> "local[*]",                            // local mode, use all available cores
      "mongo.uri" -> "mongodb://112.74.72.119:27017/recommender",
      "mongo.db" -> "recommender"
    )

    // Build the SparkConf and SparkSession.
    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("DataLoader")
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    // Ensure the session is released even if parsing or the MongoDB write fails.
    try {
      import spark.implicits._

      // Load movies; fields are '^'-separated ("\\^" because split takes a regex
      // and '^' is a regex metacharacter).
      val movieRDD = spark.sparkContext.textFile(MOVIE_DADA_PATH)

      val movieDF = movieRDD.map(
        item => {
          val attr = item.split("\\^")
          Movie(attr(0).toInt, attr(1).trim, attr(2).trim, attr(3).trim, attr(4).trim,
            attr(5).trim, attr(6).trim, attr(7).trim, attr(8).trim, attr(9).trim)
        }
      ).toDF()

      // Load ratings; comma-separated: uid,mid,score,timestamp.
      val ratingRDD = spark.sparkContext.textFile(RATING_DATA_PATH)

      val ratingDF = ratingRDD.map(item => {
        val attr = item.split(",")
        Rating(attr(0).toInt, attr(1).toInt, attr(2).toDouble, attr(3).toInt)
      }).toDF()

      // Load tags; comma-separated: uid,mid,tag,timestamp.
      val tagRDD = spark.sparkContext.textFile(TAG_DATA_PATH)

      val tagDF = tagRDD.map(item => {
        val attr = item.split(",")
        Tag(attr(0).toInt, attr(1).toInt, attr(2).trim, attr(3).toInt)
      }).toDF()

      // Implicit parameter picked up by storeDataInMongoDB. Explicit type
      // annotation added: implicits should always declare their type.
      implicit val mongoConfig: MongoConfig = MongoConfig(config("mongo.uri"), config("mongo.db"))

      // Persist all three DataFrames to MongoDB.
      storeDataInMongoDB(movieDF, ratingDF, tagDF)

      // Data preprocessing (placeholder).

    } finally {
      spark.stop()
    }
  }

  /**
   * Drops and rewrites the Movie/Rating/Tag collections, then recreates the
   * mid/uid indexes used by the recommender queries.
   *
   * @param movieDF     movies to persist
   * @param ratingDF    ratings to persist
   * @param tagDF       tags to persist
   * @param mongoConfig implicit MongoDB connection settings
   */
  def storeDataInMongoDB(movieDF: DataFrame, ratingDF: DataFrame, tagDF: DataFrame)(implicit mongoConfig: MongoConfig): Unit = {
    // Open a Casbah client. `casbah.MongoClientURI` is fully qualified to
    // disambiguate from the driver-level com.mongodb.MongoClientURI import.
    val mongoClient = MongoClient(casbah.MongoClientURI(mongoConfig.uri))

    // Fix: close the client even when a drop/write/index call throws;
    // previously an exception leaked the connection.
    try {
      // Drop any pre-existing collections so the load is a clean overwrite.
      mongoClient(mongoConfig.db)(MONGODB_MOVIE_COLLECTION).dropCollection()
      mongoClient(mongoConfig.db)(MONGODB_RATING_COLLECTION).dropCollection()
      mongoClient(mongoConfig.db)(MONGODB_TAG_COLLECTION).dropCollection()

      // Write each DataFrame into its collection via the MongoDB Spark connector.
      movieDF.write.option("uri", mongoConfig.uri).option("collection", MONGODB_MOVIE_COLLECTION).mode("overwrite").format("com.mongodb.spark.sql").save()
      ratingDF.write.option("uri", mongoConfig.uri).option("collection", MONGODB_RATING_COLLECTION).mode("overwrite").format("com.mongodb.spark.sql").save()
      tagDF.write.option("uri", mongoConfig.uri).option("collection", MONGODB_TAG_COLLECTION).mode("overwrite").format("com.mongodb.spark.sql").save()

      // Recreate the single-field indexes the recommender queries rely on.
      mongoClient(mongoConfig.db)(MONGODB_MOVIE_COLLECTION).createIndex(MongoDBObject("mid" -> 1))
      mongoClient(mongoConfig.db)(MONGODB_RATING_COLLECTION).createIndex(MongoDBObject("uid" -> 1))
      mongoClient(mongoConfig.db)(MONGODB_RATING_COLLECTION).createIndex(MongoDBObject("mid" -> 1))
      mongoClient(mongoConfig.db)(MONGODB_TAG_COLLECTION).createIndex(MongoDBObject("uid" -> 1))
      mongoClient(mongoConfig.db)(MONGODB_TAG_COLLECTION).createIndex(MongoDBObject("mid" -> 1))
    } finally {
      mongoClient.close()
    }
  }
}
