
import java.util
import org.apache.spark.mllib.recommendation.{ALS, Rating}
import org.apache.spark.{SparkConf, SparkContext}
 
object CollaborativeFilter {

  /** Entry point: trains an ALS collaborative-filtering model from a
   *  space-separated "user item rating" text file and prints the top-3
   *  product recommendations for user 2.
   *
   *  @param args optional — args(0) overrides the default input path
   */
  def main(args: Array[String]): Unit = {
    // Spark configuration: local single-JVM mode (demo setting).
    val conf = new SparkConf().setMaster("local").setAppName("CollaborativeFilter ")
    val sc = new SparkContext(conf)
    try {
      // Input path may be supplied on the command line; falls back to the
      // original hard-coded location so existing invocations still work.
      val inputPath = args.headOption.getOrElse("D:/测试数据/dx/u1.txt")
      val data = sc.textFile(inputPath)
      // Parse each "user item rating" line into MLlib's Rating type.
      // flatMap + Option skips malformed lines instead of throwing a
      // MatchError (the original match was non-exhaustive).
      val ratings = data.flatMap(_.split(' ') match {
        case Array(user, item, rate) =>
          Some(Rating(user.toInt, item.toInt, rate.toDouble))
        case _ => None // malformed line — ignore
      })
      val rank = 5           // number of latent factors
      val numIterations = 10 // ALS training iterations
      val lambda = 0.01      // regularization parameter
      val model = ALS.train(ratings, rank, numIterations, lambda)
      // Top-3 product recommendations for user 2 (the call requests 3,
      // not 1 as the original comment claimed).
      val recommendations = model.recommendProducts(2, 3)
      recommendations.foreach(println)
    } finally {
      sc.stop() // release Spark resources even if parsing/training fails
    }
  }

}