package com.deep.test

import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.mllib.recommendation.ALS
import org.apache.spark.mllib.recommendation.Rating

/**
 * Collaborative-filtering example: trains a recommendation model with
 * ALS (alternating least squares) on the MovieLens 100k ratings data.
 */
object CollaborativeFilter {

  def main(args: Array[String]): Unit = {
    // Run locally; this is a standalone example, not a cluster job.
    val conf = new SparkConf().setMaster("local").setAppName("CollaborativeFilter")
    val sc = new SparkContext(conf)
    try {
      // Load the MovieLens 100k ratings file.
      // NOTE(review): u.data is TAB-separated (user \t item \t rating \t timestamp);
      // the original split on ' ' yielded one-element arrays, so the (non-exhaustive)
      // match below threw a MatchError on every line. Split on '\t' instead.
      val ratings = sc.textFile("data/ml-100k/u.data").map(_.split('\t') match {
        // Convert each record into MLlib's Rating type; the timestamp is unused.
        case Array(user, item, rate, _) =>
          Rating(user.toInt, item.toInt, rate.toDouble)
      })
      // Number of latent factors in the matrix factorization.
      val rank = 2
      // Number of ALS iterations to run.
      val numIterations = 2
      // Train the model (last argument is the regularization parameter lambda).
      val model = ALS.train(ratings, rank, numIterations, 0.01)
      // Recommend one product for user 2 and print the result.
      val recommendations = model.recommendProducts(2, 1)
      recommendations.foreach(println)
    } finally {
      // Release Spark resources even if loading or training fails.
      sc.stop()
    }
  }

}