package com.lpssfxy.offline

import com.lpssfxy.offline.entities.{UserRecs,Recommendation}
import com.lpssfxy.offline.model.OfflineALSModel
import com.lpssfxy.offline.utils.AppUtils
import org.apache.spark.sql
import org.apache.spark.sql.SparkSession

object CalculateUserSimRecList {

  def main(args: Array[String]): Unit = {
    // Build the SparkSession through the shared application helper.
    val session = AppUtils.createSparkSession("CalculateUserSimRecList", AppUtils.getSparkCores)
    // Load the raw (userId, productId, rating) triples.
    val ratings = AppUtils.loadRatingData(session)
    // Compute the per-user recommendation lists.
    val recommendations = calculateUserRecommendations(session, ratings)
    // Persist the result into the MongoDB user-recommendations collection.
    AppUtils.saveRecommendationsToMongoDB(recommendations, AppUtils.MONGODB_USER_RECS_COLLECTION)
    // Release cluster resources.
    session.stop()
  }

  /**
   * Trains an ALS model on the rating data and derives, for every user, a
   * ranked list of the products with the highest predicted scores.
   *
   * @param spark     active SparkSession (needed for the RDD -to-DataFrame conversion)
   * @param ratingRDD rating triples of (userId, productId, rating)
   * @return DataFrame of UserRecs rows, one row per user
   */
  private def calculateUserRecommendations(spark: SparkSession, ratingRDD: org.apache.spark.rdd.RDD[(Int, Int, Double)]): sql.DataFrame = {
    import spark.implicits._

    // Distinct users and products observed in the rating data.
    val users = ratingRDD.map { case (user, _, _) => user }.distinct()
    val products = ratingRDD.map { case (_, product, _) => product }.distinct()

    // ALS hyper-parameters: latent-factor rank, iteration count, regularization.
    val rank = 50
    val iterations = 10
    val lambda = 0.01
    val model = OfflineALSModel.trainALSModel(ratingRDD, rank, iterations, lambda)

    // Score every (user, product) pair with the trained model and keep only
    // positive predictions as recommendation candidates.
    val predictions = model.predict(users.cartesian(products))
    val candidates = predictions.filter(_.rating > 0)

    // Per user: sort candidates by predicted score (descending) and keep at
    // most the configured number of recommendations.
    candidates
      .map(r => (r.user, (r.product, r.rating)))
      .groupByKey()
      .map { case (userId, scored) =>
        val topPicks = scored.toList
          .sortWith(_._2 > _._2)
          .take(AppUtils.USER_MAX_RECOMMENDATION_SIZE)
          .map { case (productId, score) => Recommendation(productId, score) }
        UserRecs(userId, topPicks)
      }
      .toDF()
  }
}
