package edu.qdu.dataload

import com.mongodb.casbah.commons.MongoDBObject
import com.mongodb.casbah.{MongoClient, MongoClientURI}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

/**
 * 商品数据、用户商品评分数据装载到MongoDB中的Recommender库下面
 */

/** MongoDB connection settings: `url` is the full connection URI, `db` the database name. */
case class MongoConfig(url:String,db:String)
/** A product record parsed from products.csv (fields separated by '^'). */
case class Product(productId:Int,name:String,imageUrl:String,categories:String,tags:String)
/** A user's rating of a product, parsed from ratings.csv (comma-separated). */
case class Rating(userId:Int,productId:Int,rating:Double,timestamp:Long)

/**
 * Loads product data and user-product rating data from CSV files and stores
 * them in the `Product` and `Rating` collections of the MongoDB database.
 */
object DataLoader {
  // MongoDB collection names.
  // NOTE(review): "RATTING" is a typo for "RATING"; the name is kept so existing
  // callers of this public constant keep compiling. The collection value is correct.
  val RATTING_COLLECTION = "Rating"
  val PRODUCT_COLLECTION = "Product"

  def main(args: Array[String]): Unit = {
    // MongoDB connection settings.
    val configMap = Map(
      "url" -> "mongodb://spark01:27017/recommender",
      "db" -> "recommender"
    )

    // Implicit connection config picked up by writeDataToMongoDB.
    implicit val config: MongoConfig =
      MongoConfig(configMap.getOrElse("url", ""), configMap.getOrElse("db", ""))

    // SparkSQL session.
    val spark = SparkSession.builder()
      .appName("dataloader")
      .master("local[*]") // change the master when deploying to a cluster
      .getOrCreate()

    // Implicit conversions (RDD -> DataFrame via .toDF()).
    import spark.implicits._

    // Build the product DataFrame. Each line is '^'-separated; fields 0,1,4,5,6
    // are productId, name, imageUrl, categories and tags (2 and 3 are skipped).
    val productDF = spark.sparkContext.textFile("D:/bigdata_projects/hn-recommender/src/main/resources/products.csv")
      .filter(_.nonEmpty)
      .map { line =>
        val fields = line.split("\\^")
        Product(fields(0).trim.toInt, fields(1).trim, fields(4).trim, fields(5).trim, fields(6).trim)
      }
      .toDF()
    productDF.show()

    // Build the rating DataFrame. Each line is comma-separated:
    // userId,productId,rating,timestamp
    val ratingDF = spark.sparkContext.textFile("D:/bigdata_projects/hn-recommender/src/main/resources/ratings.csv")
      .filter(_.nonEmpty)
      .map { line =>
        val fields = line.split(",")
        Rating(fields(0).trim.toInt, fields(1).trim.toInt, fields(2).trim.toDouble, fields(3).trim.toLong)
      }
      .toDF()
    ratingDF.show()

    // Persist both DataFrames into MongoDB.
    writeDataToMongoDB(productDF, ratingDF)

    // Shut down Spark.
    spark.stop()
  }

  /**
   * Writes the product and rating DataFrames into their MongoDB collections,
   * dropping any pre-existing data first and creating lookup indexes afterwards.
   *
   * @param productDF products to store in the Product collection
   * @param ratingDF  ratings to store in the Rating collection
   * @param config    implicit MongoDB connection settings (uri + database name)
   */
  def writeDataToMongoDB(productDF:DataFrame,ratingDF:DataFrame)(implicit config:MongoConfig): Unit ={
    // Native MongoDB client, used for dropping collections and creating indexes.
    val client = MongoClient(MongoClientURI(config.url))
    // BUG FIX: productCollection previously pointed at the Rating collection,
    // so the Product collection was never dropped and got the wrong index.
    val productCollection = client(config.db)(PRODUCT_COLLECTION)
    val ratingCollection = client(config.db)(RATTING_COLLECTION)

    // Drop stale data before reloading.
    productCollection.dropCollection()
    ratingCollection.dropCollection()

    // Write productDF into the Product collection via the MongoDB Spark connector.
    productDF
      .write
      .option("uri",config.url)
      .option("collection",PRODUCT_COLLECTION)
      .mode(SaveMode.Overwrite)
      .format("com.mongodb.spark.sql")
      .save()

    // Write ratingDF into the Rating collection.
    ratingDF
      .write
      .option("uri",config.url)
      .option("collection",RATTING_COLLECTION)
      .mode(SaveMode.Overwrite)
      .format("com.mongodb.spark.sql")
      .save()

    // BUG FIX: index keys must match the case-class field names Spark writes
    // ("productId"/"userId" with a lower-case 'd'), not "productID"/"userID" —
    // the old indexes were built on fields that do not exist in the documents.
    productCollection.createIndex(MongoDBObject("productId"->1))
    ratingCollection.createIndex(MongoDBObject("productId"->1))
    ratingCollection.createIndex(MongoDBObject("userId"->1))

    // Release the native client's connection pool (was previously leaked).
    client.close()
  }
}
