package com.datamining.rec_test

import org.apache.spark.mllib.recommendation._
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2016/11/3.
  * Example: explicit-feedback collaborative filtering with Spark MLlib ALS.
  * Trains a matrix-factorization model on MovieLens ratings, then predicts
  * individual ratings and top-N user/item recommendations.
  */
object ALS_RatingTest {
  def main(args: Array[String]): Unit = {
    // Run Spark locally with 4 worker threads.
    val sparkConf = new SparkConf()
      .setMaster("local[4]")
      .setAppName("my_test")

    val sparkContext = new SparkContext(sparkConf)
    try {
      // 1. Load the MovieLens ratings file and parse it into RDD[Rating].
      //    Each line has the form "userId::movieId::rating::timestamp".
      //    Rating is the (user, product, rating) triple from
      //    org.apache.spark.mllib.recommendation.
      val ratingRdd = sparkContext.textFile(
        "file:///J:/idea_workspace/spark-test/src/main/resources/data/ml-1m/ratings.dat")

      // Malformed lines are dropped via flatMap instead of raising a
      // MatchError (the original match was non-exhaustive).
      val ratings = ratingRdd.flatMap(_.split("::") match {
        case Array(userId, movieId, rating, _) =>
          Some(Rating(userId.toInt, movieId.toInt, rating.toDouble))
        case _ => None
      })

      // 2. Factorize the rating matrix with ALS.
      val rank = 10          // number of latent factors
      val numIterations = 20 // ALS iterations
      // Regularization parameter. NOTE: this was previously named `alpha`,
      // which is misleading — in MLlib `alpha` is the implicit-feedback
      // confidence parameter; the 4-argument `ALS.train` takes `lambda`.
      val lambda = 0.01

      val model = ALS.train(ratings, rank, numIterations, lambda)

      // 3. Use the trained model for a few predictions.

      // Predict the rating of user 1 for item 661. The original code printed
      // this message but actually called predict(5871, 899) — fixed so the
      // prediction matches the message.
      println(s"预测用户1对商品661的评分 \n user 1 item 661 rating : ${model.predict(1, 661)}")

      // Top-3 items recommended to user 1.
      println("预测用户1最感兴趣的3个项目 \n")
      val userId = 1
      model.recommendProducts(userId, 3).foreach(rat =>
        println(s" == user: ${userId} item: ${rat.product} rating: ${rat.rating}")
      )

      // Top-3 users most likely to be interested in item 899.
      println("预测项目最感兴趣的3个用户 \n")
      val itemId = 899
      model.recommendUsers(itemId, 3).foreach(rat =>
        println(s" == item: ${itemId} user: ${rat.user} rating: ${rat.rating}")
      )
    } finally {
      sparkContext.stop() // always release the local Spark context
    }
  }
}
}
