package com.cw.recommend.offline

import com.cw.recommend.common.Runner.{RunnerConfig, runSpark}
import com.cw.recommend.common.constant._
import com.cw.recommend.common.feature.computeFeatureSimilarity
import com.cw.recommend.common.model._
import com.cw.recommend.common.util.MongoDBUtil.readMongoDB
import com.cw.recommend.common.util.SyntaxUtil.Syntax
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel

/**
 * Offline ALS-based recommendation job.
 *
 * Pipeline: read user ratings from MongoDB, load a previously-trained ALS
 * model, predict a rating for every (user, product) pair, keep each user's
 * top-N predictions as a recommendation list, compute a product similarity
 * matrix from the model's latent product features, and sink both results
 * back to MongoDB.
 */
object ALSBased {

  def main(args: Array[String]): Unit = {
    // Explicit type annotation: implicit vals must be explicitly typed in
    // Scala 3 and this also keeps implicit resolution predictable.
    implicit val runnerConfig: RunnerConfig = RunnerConfig("recommend with als")
    runSpark { spark =>

      import spark.implicits._

      // Ratings source; distinct user and product id sets drive the
      // candidate-pair generation below.
      val userRatingRdd = readMongoDB(spark, RATING_COLLECTION).as[UserRating].rdd
      val userRdd = userRatingRdd.map(_.userId).distinct
      val productRdd = userRatingRdd.map(_.productId).distinct

      // Previously-trained ALS model (persisted by the training job).
      val model = MatrixFactorizationModel.load(spark.sparkContext, MODEL_PATH)

      // All (user, product) candidate pairs. Cached because predict's
      // internal joins may traverse the RDD more than once.
      val userProduct = userRdd.cartesian(productRdd).cache()

      // Predicted rating for every candidate pair.
      val predicts = model.predict(userProduct)

      // Convert predictions into a per-user top-N recommendation list.
      val recommendList = predicts
        .filter(_.rating > 0) // keep only positively-scored predictions
        .groupBy(_.user)
        .map { case (u, xs) =>
          RecommendList(u,
            xs.toSeq
              .sortBy(-_.rating) // highest predicted rating first
              .take(RECOMMEND_NUM)
              .map(r => RecommendItem(r.product, r.rating)))
        }
        .toDF

      // Product similarity matrix from the learned latent feature vectors;
      // self-pairs are excluded before the pairwise similarity computation.
      val productFeatures = model.productFeatures
      val productSimList = productFeatures
        .cartesian(productFeatures)
        .filter { case (a, b) => a._1 != b._1 } |>
        (computeFeatureSimilarity(spark, _))

      productSimList.show()

      import com.cw.recommend.common.util.MongoDBUtil._
      recommendList.sinkMongoDB(RECOMMEND_LIST)
      productSimList.sinkMongoDB(PRODUCT_SIM_LIST)

      // Release the cached candidate pairs once all actions have executed,
      // so executor storage memory is not held for the rest of the session.
      userProduct.unpersist()
    }
  }
}
