package com.ml.als

import org.apache.log4j.{Level, Logger}
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by zghgchao 2017/12/7 15:20
  *
  */
/**
  * Demo: trains an explicit-feedback ALS model on the MovieLens `u.data`
  * ratings file and prints the top-5 movie recommendations for user 196.
  */
object Recommend1 {

  def main(args: Array[String]): Unit = {
    SetLogger
    val sparkConf = new SparkConf().setAppName("Recommend").setMaster("local[5]")
    val sc = new SparkContext(sparkConf)

    // Load the ratings file (tab-separated: user id, movie id, rating, timestamp).
    // NOTE(review): hard-coded Windows path — consider taking it from args.
    val rawUserData = sc.textFile("C:/tmp/u.data")

    // 1) Keep only the first three fields: user, movie, rating.
    val rawRatings = rawUserData.map(_.split("\t").take(3))
    // 2) Convert each row into an MLlib Rating.
    val ratingsRDD = rawRatings.map {
      case Array(user, movie, rating) => Rating(user.toInt, movie.toInt, rating.toDouble)
    }

    val rank = 10          // number of latent factors in the factorization
    val numIterations = 20 // number of ALS iterations
    // Regularization parameter. The 4th positional argument of ALS.train is
    // `lambda`, not `alpha` (alpha is the implicit-feedback confidence weight
    // used only by ALS.trainImplicit), so the previous name `alpha` was wrong.
    val lambda = 0.01

    // Train an explicit-feedback ALS model.
    val model = ALS.train(ratingsRDD, rank, numIterations, lambda)

    // Recommend the top 5 products (movies) for user 196 and print them.
    val recommendations = model.recommendProducts(196, 5)
    recommendations.foreach { r =>
      println("用户id:" + r.user + "   物品id:" + r.product + "   评分:" + r.rating)
    }

    sc.stop()
  }

  /** Silence Spark/third-party log output so only program output reaches the console. */
  def SetLogger: Unit = {
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("com").setLevel(Level.OFF)
    System.setProperty("spark.ui.showConsoleProgress", "false")
    Logger.getRootLogger.setLevel(Level.OFF)
  }

}
