package com.etc

import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.sql.SparkSession

/**
  * Recommendation demo: trains an ALS collaborative-filtering model on a
  * ratings file and prints the top product recommendations for each user.
  */
object Recommend {

  /**
    * Parses one input line of the form "userId,productId,rating" into a [[Rating]].
    *
    * NOTE(review): MovieLens `u.data` is usually TAB-separated — confirm that a
    * comma is really the delimiter of the actual input file.
    *
    * @param str a single comma-separated line
    * @return the parsed [[Rating]]
    * @throws IllegalArgumentException if the line has fewer than 3 fields
    * @throws NumberFormatException if a field is not numeric
    */
  def parseRating(str: String): Rating = {
    val fields = str.split(",")
    // Fail with a descriptive message instead of an opaque index-out-of-bounds.
    require(fields.length >= 3, s"Expected 'user,product,rating' but got: $str")
    Rating(fields(0).toInt, fields(1).toInt, fields(2).toFloat)
  }

  /**
    * Entry point: reads "u.data", trains an ALS model, and prints the top
    * recommendations for every user.
    */
  def main(args: Array[String]): Unit = {
    // Local-mode session; suitable for this demo only.
    val spark = SparkSession.builder().master("local").appName("ASL-Demo").getOrCreate()
    try {
      // Read the ratings file and convert each line into a Rating.
      val ratingsRDD = spark.sparkContext.textFile("u.data").map(parseRating)
      // Number of latent factors.
      val rank = 50
      // Maximum number of ALS iterations.
      val maxIter = 10
      // Regularization parameter (fixes the original "labmda" misspelling).
      val lambda = 0.01
      // Train the matrix-factorization model.
      val model = ALS.train(ratingsRDD, rank, maxIter, lambda)
      // Number of products to recommend per user.
      val proNum = 2
      // Top-N recommendations, keyed by user id.
      val recommendations = model.recommendProductsForUsers(proNum)
      // Destructure the pair and name the inner parameter to avoid the
      // original's shadowed `x` in the nested foreach.
      recommendations.foreach { case (userId, userRatings) =>
        println("用户 " + userId)
        userRatings.foreach { r =>
          println(" 推荐物品 " + r.product + ", 预测评分 " + r.rating)
          println()
        }
        println("===============================")
      }
    } finally {
      // Release Spark resources even if loading or training fails.
      spark.stop()
    }
  }

}
