package com.xxh.user.recommend

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

// One row of movies.dat, e.g. "1::Toy Story (1995)::Animation|Children's|Comedy".
// mName is the title with the trailing "(yyyy)" stripped; issue is that year as a
// string; types is the raw "|"-separated genre list.
case class Movie(mid: Int, mName: String, issue: String, types: String)

// One row of ratings.dat, e.g. "33::3121::3::978108460":
// userId::movieId::score::epochSeconds. Score is parsed as Int, so this assumes
// whole-number ratings (MovieLens 1M style) — half-star datasets would fail here.
case class Rating(uid: Int, mid: Int, score: Int, timestamp: Int)

case class Tag()

case class EsConfig(httpHosts: String, transportHosts: String, index: String, clusterName: String)


/**
 * Batch loader: reads MovieLens-style "::"-delimited .dat files into DataFrames
 * and persists them to MongoDB (Elasticsearch export is a stub).
 */
object DataLoader {
  // Windows-local input files; fields within each line are separated by "::".
  val MOVIE_DATA_PATH = "D:\\StudySummer\\recommDemo\\recommender\\DataLoader\\src\\main\\resources\\movies.dat"
  val RATING_DATA_PATH = "D:\\StudySummer\\recommDemo\\recommender\\DataLoader\\src\\main\\resources\\ratings.dat"
  val USER_DATA_PATH = "D:\\StudySummer\\recommDemo\\recommender\\DataLoader\\src\\main\\resources\\users.dat"
  // Target database "movierec"; collections are chosen per write in SaveToDB.
  val MONGODB_URI = "mongodb://localhost:27017/movierec"

  def main(args: Array[String]): Unit = {
    // Create the Spark config: local mode, MongoDB output URI for the connector.
    val sparkConf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("DataLoader")
      .set("spark.mongodb.output.uri", MONGODB_URI)

    // Create the SparkSession.
    val spark = SparkSession.builder().config(sparkConf).getOrCreate()

    try {
      // Load raw text lines.
      val movieRDD = spark.sparkContext.textFile(MOVIE_DATA_PATH)
      val ratingRDD = spark.sparkContext.textFile(RATING_DATA_PATH)

      // Parse into DataFrames: index 0 = ratings, index 1 = movies (see initData).
      val dfs: Array[DataFrame] = initData(movieRDD, ratingRDD, spark)

      // Persist to MongoDB.
      SaveToDB(dfs)

      // Persist to Elasticsearch — not implemented yet.
      // SaveToES(dfs)
    } finally {
      // Always release Spark resources, even if loading or writing fails.
      spark.stop()
    }
  }

  /**
   * Parses the raw movie and rating lines into DataFrames.
   *
   * @param mv    raw movies.dat lines, "mid::Title (yyyy)::genre|genre"
   * @param rt    raw ratings.dat lines, "uid::mid::score::timestamp"
   * @param spark session used for the RDD -> DataFrame conversion
   * @return Array(ratingDF, movieDF) — ratings FIRST; SaveToDB relies on this order
   */
  def initData(mv: RDD[String], rt: RDD[String], spark: SparkSession): Array[DataFrame] = {
    import spark.implicits._

    val mvDf = mv.map { line =>
      val fields = line.split("::")
      val title = fields(1)
      // The issue year is the trailing "(yyyy)" — always 6 characters of the title.
      // trim the name so "Toy Story (1995)" yields "Toy Story", not "Toy Story ".
      val name = title.substring(0, title.length - 6).trim
      val issue = title.substring(title.length - 6).drop(1).dropRight(1).trim // "(1995)" -> "1995"
      Movie(fields(0).toInt, name, issue, fields(2).trim)
    }.toDF()

    val rtDf = rt.map { line =>
      val fields = line.split("::")
      Rating(fields(0).toInt, fields(1).toInt, fields(2).toInt, fields(3).toInt)
    }.toDF()

    // Quick sanity peek at the parsed data.
    rtDf.show(5)
    mvDf.show(5)

    Array(rtDf, mvDf)
  }

  /** Exports the DataFrames to Elasticsearch. TODO: not implemented. */
  def SaveToES(dfs: Array[DataFrame]): Unit = {

  }

  /**
   * Writes the parsed DataFrames to MongoDB, overwriting existing data.
   * Expects dfs(0) = ratings, dfs(1) = movies (the order produced by initData).
   */
  def SaveToDB(dfs: Array[DataFrame]): Unit = {
    writeCollection(dfs(0), "rating")
    writeCollection(dfs(1), "movie")
  }

  // Overwrites a single MongoDB collection via the mongo-spark connector.
  private def writeCollection(df: DataFrame, collection: String): Unit =
    df.write
      .option("uri", MONGODB_URI)
      .option("collection", collection)
      .format("com.mongodb.spark.sql")
      .mode("overwrite")
      .save()

}
