package com.mininglamp.offline

import com.mongodb.spark.MongoSpark
import com.mongodb.spark.config.ReadConfig
import org.apache.spark.SparkConf
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.jblas.DoubleMatrix

/**
  * Project: ECommerceRecommendSystem
  * Description: Offline recommendation module.
  *
  * Trains an ALS latent-factor model on user/product ratings loaded from
  * MongoDB, then writes two result sets back to MongoDB:
  *   1. each user's top-N recommended products (predicted scores);
  *   2. a per-product similarity list based on the cosine similarity of the
  *      learned product feature vectors.
  *
  * Created by ZhouPeng on 2021/12/28 12:01
  **/
object OfflineRecommender {

  // MongoDB collection holding the raw rating data.
  val MONGODB_RATING_COLLECTION = "Rating"

  // MongoDB collection for the per-user product recommendation lists.
  val USER_RECS_COLLECTION = "userRecs"
  // MongoDB collection for the product similarity lists.
  val PRODUCT_RECS_COLLECTION = "productRecs"
  // Maximum length of a user's recommendation list.
  val USER_MAX_RECOMMENDATION = 20

  def main(args: Array[String]): Unit = {
    // Basic runtime configuration: Spark master URL and MongoDB connection info.
    val config = Map(
      "spark.cores" -> "local[*]",
      "mongo.uri" -> "mongodb://master:27017/recommender",
      "mongo.db" -> "recommender"
    )

    val sparkConf = new SparkConf().setMaster(config("spark.cores")).setAppName("OfflineRecommender")
    val sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()

    import sparkSession.implicits._
    // Explicit type annotation: implicit vals should (and in Scala 3 must) declare their type.
    implicit val mongoConfig: MongoConfig = MongoConfig(config("mongo.uri"), config("mongo.db"))

    // Load ratings from MongoDB as (userId, productId, score) triples.
    val readConfig = ReadConfig(Map(
      "uri" -> mongoConfig.mongoUri,
      "collection" -> MONGODB_RATING_COLLECTION
    ))

    // NOTE(review): getInteger/getDouble/getLong unbox and will NPE on documents
    // missing one of these fields — assumes the collection is clean; verify upstream ETL.
    val ratingRDD = MongoSpark.load(sparkSession.sparkContext, readConfig).map(
      document => {
        ProductRating(document.getInteger("userId"), document.getInteger("productId"),
          document.getDouble("score"), document.getLong("timestamp"))
      }
    ).map(productRating => (productRating.userId, productRating.productId, productRating.score)).cache()

    // Distinct users and products appearing in the rating data.
    val userRDD = ratingRDD.map(_._1).distinct()
    val productRDD = ratingRDD.map(_._2).distinct()

    // 1. Train the latent-factor model.
    val trainData = ratingRDD.map(ratingData => Rating(ratingData._1, ratingData._2, ratingData._3))
    // rank: number of latent features; iteration: ALS iterations; lambda: regularization factor.
    val (rank, iteration, lambda) = (5, 10, 0.01)
    val model = ALS.train(trainData, rank, iteration, lambda)

    // 2. Predict the full user x product score matrix (cartesian product of all
    //    users and products), then keep each user's top-N products by predicted score.
    val userProductRDD = userRDD.cartesian(productRDD)
    val preRating = model.predict(userProductRDD)

    val userRecsDF = preRating.map(
      rating => (rating.user, (rating.product, rating.rating))
    ).groupByKey()
      .map {
        case (user, recs) =>
          UserRecs(user, recs.toList.sortWith(_._2 > _._2)
            .take(USER_MAX_RECOMMENDATION)
            .map(x => Recommendation(x._1, x._2)))
      }.toDF()
    // Persist the per-user recommendation lists.
    storeData2Mongo(userRecsDF, USER_RECS_COLLECTION)

    // 3. Pairwise cosine similarity between product feature vectors.
    val productFeatures = model.productFeatures.map {
      case (productId, features) => (productId, new DoubleMatrix(features))
    }
    val productRecsDF = productFeatures.cartesian(productFeatures)
      // Drop self-pairs (a product is trivially similar to itself).
      .filter {
        case (productA, productB) => productA._1 != productB._1
      }
      .map {
        case (productA, productB) =>
          val simScore = consinSim(productA._2, productB._2)
          (productA._1, (productB._1, simScore))
      }
      // Keep only sufficiently similar pairs (score above 0.4).
      .filter(_._2._2 > 0.4)
      .groupByKey()
      .map {
        case (productId, recs) =>
          ProductRecs(productId, recs.toList.sortWith(_._2 > _._2)
            .map(x => Recommendation(x._1, x._2)))
      }.toDF()
    // Persist the product similarity lists.
    storeData2Mongo(productRecsDF, PRODUCT_RECS_COLLECTION)

    // Release the cached ratings before shutting down.
    ratingRDD.unpersist()
    sparkSession.close()
  }

  /**
    * Cosine similarity of two feature vectors.
    *
    * Returns 0.0 when either vector has zero norm (instead of NaN from a
    * 0/0 division), so the downstream score filter behaves deterministically.
    *
    * @param matrix  first feature vector
    * @param matrix1 second feature vector
    * @return cosine similarity in [-1, 1], or 0.0 for a zero-norm input
    */
  def consinSim(matrix: DoubleMatrix, matrix1: DoubleMatrix): Double = {
    val denom = matrix.norm2() * matrix1.norm2()
    if (denom == 0.0) 0.0 else matrix.dot(matrix1) / denom
  }

  /**
    * Overwrite the given MongoDB collection with the contents of the DataFrame.
    *
    * @param df              data to persist
    * @param collection_name target MongoDB collection
    * @param mongoConfig     MongoDB connection settings (implicit)
    */
  def storeData2Mongo(df: DataFrame, collection_name: String)(implicit mongoConfig: MongoConfig): Unit = {
    df.write
      .option("uri", mongoConfig.mongoUri)
      .option("collection", collection_name)
      .format("com.mongodb.spark.sql")
      .mode("overwrite")
      .save()
  }
}

/**
  * A single recommendation entry: a product and its recommendation score.
  *
  * @param productId id of the recommended product
  * @param score     predicted rating or similarity score (higher is better)
  */
final case class Recommendation(productId: Int, score: Double)


/**
  * A user's product recommendation list, sorted by descending score.
  *
  * @param userId id of the user the recommendations are for
  * @param recs   recommended products, best first
  */
final case class UserRecs(userId: Int, recs: Seq[Recommendation])

/**
  * A product's similarity list: other products ranked by descending
  * cosine similarity to this one.
  *
  * @param productId id of the reference product
  * @param recs      similar products, most similar first
  */
final case class ProductRecs(productId: Int, recs: Seq[Recommendation])


/**
  * MongoDB connection configuration.
  *
  * @param mongoUri MongoDB connection URI
  * @param mongodb  name of the MongoDB database to use
  */
final case class MongoConfig(mongoUri: String, mongodb: String)

/**
  * One user rating of one product, e.g. (4867, 457976, 5.0, 1395676800).
  *
  * @param userId    id of the rating user
  * @param productId id of the rated product
  * @param score     rating value
  * @param timestamp rating time as a Unix epoch, in seconds
  */
final case class ProductRating(userId: Int, productId: Int, score: Double, timestamp: Long)
