package learn.recommend

import learn.recommend.ItemCFDemo.parseRating
import org.apache.spark.mllib.evaluation.RegressionMetrics
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation.{ALS, Rating}
 
/**
  * Evaluates an ALS recommendation model with Mean Squared Error:
  * the smaller the MSE/RMSE, the better the model fits the observed ratings.
  */
object MSEDemo {
  def main(args: Array[String]): Unit = {
    // Fixed app name: original said "ItemCFDemo", copy-pasted from another demo.
    val conf = new SparkConf().setMaster("local[*]").setAppName("MSEDemo")
    val sc = new SparkContext(conf)
    try {
      // MovieLens 100k ratings file; parseRating turns each line into a Rating.
      // NOTE(review): path is hard-coded — assumes the data exists locally.
      val ratingData = sc.textFile("E:\\test\\ml-100k\\u.dat")
      val ratingsRDD = ratingData.map(parseRating(_))

      // Train the ALS model: rank = 50, iterations = 10, lambda = 0.01
      val model = ALS.train(ratingsRDD, 50, 10, 0.01)

      // (user, product) pairs for which we want predictions
      val usersProducts = ratingsRDD.map {
        case Rating(user, product, _) => (user, product)
      }

      // Predicted ratings keyed by (user, product)
      val predictions = model.predict(usersProducts).map {
        case Rating(user, product, rating) => ((user, product), rating)
      }

      // Actual ratings keyed by (user, product)
      val ratings = ratingsRDD.map {
        case Rating(user, product, rating) => ((user, product), rating)
      }

      // Join yields ((user, product), (actual, predicted)).
      // RegressionMetrics expects (prediction, observation) pairs, so swap the
      // tuple here. The original code destructured this as (predicted, real),
      // mislabeling the values — harmless for the symmetric MSE/RMSE, but wrong
      // for asymmetric metrics such as R^2 or explained variance.
      val predictedAndTrue = ratings.join(predictions).map {
        case (_, (actual, predicted)) => (predicted, actual)
      }

      // Compute MSE and RMSE from the (prediction, observation) pairs
      val regressionMetrics = new RegressionMetrics(predictedAndTrue)
      val MSE = regressionMetrics.meanSquaredError
      val RMSE = regressionMetrics.rootMeanSquaredError

      println("Mean Squared Error=" + MSE)
      println("Root Mean Squared Error=" + RMSE)
    } finally {
      // Release cluster resources even if training or evaluation fails.
      sc.stop()
    }
  }
}